# Notebook from Ines-Filipa/theonerig
Path: 11_synchro.extracting.ipynb
<code>
#default_exp synchro.extracting_____no_output_____from nbdev.showdoc import *_____no_output_____%load_ext autoreload
%autoreload 2_____no_output_____
</code>
# synchro.extracting
> Functions to extract data of an experiment from 3rd-party programs_____no_output_____To align the time series of an experiment, we need to read logs and import data produced by the 3rd-party software used during the experiment. It includes:
* QDSpy logging
* Numpy arrays of the stimuli
* SpykingCircus spike sorting refined with Phy
* Eye tracking results from MaskRCNN_____no_output_____
<code>
#export
import numpy as np
import datetime
import os, glob
import csv
import re
from theonerig.synchro.io import *
from theonerig.utils import *
def get_QDSpy_logs(log_dir):
"""Factory function to generate QDSpy_log objects from all the QDSpy logs of the folder `log_dir`"""
log_names = glob.glob(os.path.join(log_dir,'[0-9]*.log'))
qdspy_logs = [QDSpy_log(log_name) for log_name in log_names]
for qdspy_log in qdspy_logs:
qdspy_log.find_stimuli()
return qdspy_logs
class QDSpy_log:
"""Class defining a QDSpy log.
It reads the log it represents and extracts the stimulus information from it:
- Start and end time
- Parameters like the md5 key
- Frame delays
"""
def __init__(self, log_path):
self.log_path = log_path
self.stimuli = []
self.comments = []
def _extract_data(self, data_line):
data = data_line[data_line.find('{')+1:data_line.find('}')]
data_splitted = data.split(',')
data_dict = {}
for data in data_splitted:
ind = data.find("'")
if type(data[data.find(":")+2:]) is str:
data_dict[data[ind+1:data.find("'",ind+1)]] = data[data.find(":")+2:][1:-1]
else:
data_dict[data[ind+1:data.find("'",ind+1)]] = data[data.find(":")+2:]
return data_dict
def _extract_time(self,data_line):
return datetime.datetime.strptime(data_line.split()[0], '%Y%m%d_%H%M%S')
def _extract_delay(self,data_line):
ind = data_line.find('#')
index_frame = int(data_line[ind+1:data_line.find(' ',ind)])
ind = data_line.find('was')
delay = float(data_line[ind:].split(" ")[1])
return (index_frame, delay)
def __repr__(self):
return "\n".join([str(stim) for stim in self.stimuli])
@property
def n_stim(self):
return len(self.stimuli)
@property
def stim_names(self):
return [stim.name for stim in self.stimuli]
def find_stimuli(self):
"""Find the stimuli in the log file and return the list of the stimuli
found by this object."""
with open(self.log_path, 'r', encoding="ISO-8859-1") as log_file:
stimulus_ON = False #so a WARNING line seen before any stimulus start does not fail
for line in log_file:
if "DATA" in line:
data_juice = self._extract_data(line)
if 'stimState' in data_juice.keys():
if data_juice['stimState'] == "STARTED" :
curr_stim = Stimulus(self._extract_time(line))
curr_stim.set_parameters(data_juice)
self.stimuli.append(curr_stim)
stimulus_ON = True
elif data_juice['stimState'] == "FINISHED" or data_juice['stimState'] == "ABORTED":
curr_stim.is_aborted = data_juice['stimState'] == "ABORTED"
curr_stim.stop_time = self._extract_time(line)
stimulus_ON = False
elif 'userComment' in data_juice.keys():
pass
#print("userComment, use it to bind logs to records")
elif stimulus_ON: #Information on stimulus parameters
curr_stim.set_parameters(data_juice)
# elif 'probeX' in data_juice.keys():
# print("Probe center not implemented yet")
if "WARNING" in line and "dt of frame" and stimulus_ON:
curr_stim.frame_delay.append(self._extract_delay(line))
if curr_stim.frame_delay[-1][1] > 2000/60: #if longer than 2 frames could be bad
print(curr_stim.name, " ".join(line.split()[1:])[:-1])
return self.stimuli
class Stimulus:
"""Stimulus object containing information about it's presentation.
- start_time : a datetime object)
- stop_time : a datetime object)
- parameters : Parameters extracted from the QDSpy
- md5 : The md5 hash of that compiled version of the stimulus
- name : The name of the stimulus
"""
def __init__(self,start):
self.start_time = start
self.stop_time = None
self.parameters = {}
self.md5 = None
self.name = "NoName"
self.frame_delay = []
self.is_aborted = False
def set_parameters(self, parameters):
self.parameters.update(parameters)
if "_sName" in parameters.keys():
self.name = parameters["_sName"]
if "stimMD5" in parameters.keys():
self.md5 = parameters["stimMD5"]
def __str__(self):
return "%s %s at %s" %(self.name+" "*(24-len(self.name)),self.md5,self.start_time)
def __repr__(self):
return self.__str__()_____no_output_____
</code>
To read the QDSpy logs of your experiment, simply provide the folder containing the logs you want to read to `get_QDSpy_logs`:_____no_output_____
<code>
#logs = get_QDSpy_logs("./files/basic_synchro")
flickering_bars_pr WARNING dt of frame #15864 was 50.315 m
flickering_bars_pr WARNING dt of frame #19477 was 137.235 m
</code>
It returns a list of the QDSpy logs. Stimuli are contained in a list inside each log:_____no_output_____
<code>
#logs[0].stimuli_____no_output_____
</code>
The stimulus objects contain information on how their display went:_____no_output_____
<code>
# stim = logs[0].stimuli[5]
# print(stim.name, stim.start_time, stim.frame_delay, stim.md5)
flickering_bars_pr 2020-03-31 17:30:25 [(15864, 50.315), (19477, 137.235)] 0049591cdf7aa379a458230e84cc3eec
#export
def unpack_stim_npy(npy_dir, md5_hash):
"""Find the stimuli of a given hash key in the npy stimulus folder. The stimuli are in a compressed version
comprising three files. inten for the stimulus values on the screen, marker for the values of the marker
read by a photodiode to get the stimulus timing during a record, and an optional shader that is used to
specify informations about a shader when used, like for the moving gratings."""
#Stimuli can be either npy or npz (useful when working remotely)
def find_file(ftype):
flist = glob.glob(os.path.join(npy_dir, "*_"+ftype+"_"+md5_hash+".npy"))
if len(flist)==0:
flist = glob.glob(os.path.join(npy_dir, "*_"+ftype+"_"+md5_hash+".npz"))
res = np.load(flist[0])["arr_0"]
else:
res = np.load(flist[0])
return res
inten = find_file("intensities")
marker = find_file("marker")
shader, unpack_shader = None, None
if len(glob.glob(os.path.join(npy_dir, "*_shader_"+md5_hash+".np*")))>0:
shader = find_file("shader")
unpack_shader = np.empty((np.sum(marker[:,0]), *shader.shape[1:]))
#The following loop unpacks the compressed arrays
unpack_inten = np.empty((np.sum(marker[:,0]), *inten.shape[1:]))
unpack_marker = np.empty(np.sum(marker[:,0]))
cursor = 0
for i, n_frame in enumerate(marker[:,0]):
unpack_inten[cursor:cursor+n_frame] = inten[i]
unpack_marker[cursor:cursor+n_frame] = marker[i, 1]
if shader is not None:
unpack_shader[cursor:cursor+n_frame] = shader[i]
cursor += n_frame
return unpack_inten, unpack_marker, unpack_shader_____no_output_____# logs = get_QDSpy_logs("./files/basic_synchro")
flickering_bars_pr WARNING dt of frame #15864 was 50.315 m
flickering_bars_pr WARNING dt of frame #19477 was 137.235 m
</code>
To unpack the stimulus values, provide the folder of the numpy arrays and the hash of the stimulus:_____no_output_____
<code>
# unpacked = unpack_stim_npy("./files/basic_synchro/stimulus_data", "eed21bda540934a428e93897908d049e")_____no_output_____
</code>
Unpacked is a tuple, where the first element is the intensity of shape (n_frames, n_colors, y, x)_____no_output_____
<code>
# unpacked[0].shape_____no_output_____
</code>
The second element of the tuple represents the marker values for the timing. QDSpy defaults are zeros and ones, but I used custom red squares taking intensities [50,100,150,200,250] to time with five different signals._____no_output_____
<code>
# unpacked[1][:50]_____no_output_____
</code>
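As a minimal sketch (values and scheme assumed for illustration), each sample of a digitized photodiode trace could be assigned to the closest of the five marker intensities mentioned above:
<code>
# Minimal sketch (illustrative only): map photodiode samples to the closest
# of the five marker intensities used for timing. The level values and the
# nearest-level assignment are assumptions for this example.
import numpy as np

marker_levels = np.array([50, 100, 150, 200, 250])

def nearest_marker(photodiode_values):
    """Assign each sample to the closest of the five marker intensities."""
    diffs = np.abs(np.asarray(photodiode_values)[:, None] - marker_levels[None, :])
    return marker_levels[np.argmin(diffs, axis=1)]

print(nearest_marker([52., 148., 201., 255.]))  # -> [ 50 150 200 250]_____no_output_____
</code>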
Each stimulus also starts with a barcode of the form:
0 0 0 0 0 0 4 0 4\*[1-4] 0 4\*[1-4] 0 4\*[1-4] 0 4\*[1-4] 0 4 0 0 0 0 0 0
and ends with 0 0 0 0 0 0_____no_output_____
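As a minimal sketch (assuming the abstract marker values 0-4 written above), the start of such a barcode could be located in the unpacked marker trace like this:
<code>
# Minimal sketch (illustrative only): find the first frame of the start
# barcode, i.e. the first value 4 preceded by six frames of 0. Assumes the
# marker trace uses the abstract values 0-4 from the description above.
import numpy as np

def find_barcode_start(marker_trace, n_zeros=6):
    marker_trace = np.asarray(marker_trace)
    for i in range(n_zeros, len(marker_trace)):
        if marker_trace[i] == 4 and np.all(marker_trace[i - n_zeros:i] == 0):
            return i
    return None

# Toy trace following the scheme above: 0*6, 4, 0, four 4*[1-4] values, 4, 0*6
toy = [0]*6 + [4, 0, 8, 0, 4, 0, 12, 0, 16, 0, 4] + [0]*6
print(find_barcode_start(toy))  # -> 6_____no_output_____
</code>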
<code>
#export
def extract_spyking_circus_results(dir_, record_basename):
"""Extract the good cells of a record. Overlap with phy_results_dict."""
phy_dir = os.path.join(dir_,record_basename+"/"+record_basename+".GUI")
phy_dict = phy_results_dict(phy_dir)
good_clusters = []
with open(os.path.join(phy_dir,'cluster_group.tsv'), 'r') as tsvfile:
spamreader = csv.reader(tsvfile, delimiter='\t', quotechar='|')
for i,row in enumerate(spamreader):
if row[1] == "good":
good_clusters.append(int(row[0]))
good_clusters = np.array(good_clusters)
phy_dict["good_clusters"] = good_clusters
return phy_dict_____no_output_____#export
def extract_best_pupil(fn):
"""From results of MaskRCNN, go over all or None pupil detected and select the best pupil.
Each pupil returned is (x,y,width,height,angle,probability)"""
pupil = np.load(fn, allow_pickle=True)
filtered_pupil = np.empty((len(pupil), 6))
for i, detected in enumerate(pupil):
if len(detected)>0:
best = detected[0]
for detect in detected[1:]:
if detect[5]>best[5]:
best = detect
filtered_pupil[i] = np.array(best)
else:
filtered_pupil[i] = np.array([0,0,0,0,0,0])
return filtered_pupil_____no_output_____#export
def stack_len_extraction(stack_info_dir):
"""Extract from ImageJ macro directives the size of the stacks acquired."""
ptrn_nFrame = r".*number=(\d*) .*"
l_epochs = []
for fn in glob.glob(os.path.join(stack_info_dir, "*.txt")):
with open(fn) as f:
line = f.readline()
l_epochs.append(int(re.findall(ptrn_nFrame, line)[0]))
return l_epochs_____no_output_____#hide
from nbdev.export import *
notebook2script()
Converted 00_core.ipynb.
Converted 01_utils.ipynb.
Converted 02_processing.ipynb.
Converted 03_modelling.ipynb.
Converted 04_plotting.ipynb.
Converted 05_database.ipynb.
Converted 10_synchro.io.ipynb.
Converted 11_synchro.extracting.ipynb.
Converted 12_synchro.processing.ipynb.
Converted 99_testdata.ipynb.
Converted index.ipynb.
</code>
{
"repository": "Ines-Filipa/theonerig",
"path": "11_synchro.extracting.ipynb",
"matched_keywords": [
"ImageJ"
],
"stars": null,
"size": 18735,
"hexsha": "d082ef79bb0dfa3c9458937721c74f20c3add4a7",
"max_line_length": 350,
"avg_line_length": 34.4393382353,
"alphanum_fraction": 0.5215906058
}
# Notebook from SamH3pn3r/DS-Unit-4-Sprint-3-Deep-Learning
Path: module2-convolutional-neural-networks/LS_DS_432_Convolutional_Neural_Networks_Lecture.ipynb
Lambda School Data Science
*Unit 4, Sprint 3, Module 2*
---_____no_output_____# Convolutional Neural Networks (Prepare)
> Convolutional networks are simply neural networks that use convolution in place of general matrix multiplication in at least one of their layers. *Goodfellow, et al.*_____no_output_____## Learning Objectives
- <a href="#p1">Part 1: </a>Describe convolution and pooling
- <a href="#p2">Part 2: </a>Apply a convolutional neural network to a classification task
- <a href="#p3">Part 3: </a>Use a pre-trained convolution neural network for object detection
Modern __computer vision__ approaches rely heavily on convolutions as both a dimensionality reduction and feature extraction method. Before we dive into convolutions, let's talk about some of the common computer vision applications:
* Classification [(Hot Dog or Not Dog)](https://www.youtube.com/watch?v=ACmydtFDTGs)
* Object Detection [(YOLO)](https://www.youtube.com/watch?v=MPU2HistivI)
* Pose Estimation [(PoseNet)](https://ai.googleblog.com/2019/08/on-device-real-time-hand-tracking-with.html)
* Facial Recognition [Emotion Detection](https://www.cbronline.com/wp-content/uploads/2018/05/Mona-lIsa-test-570x300.jpg)
* and *countless* more
We are going to focus on classification and pre-trained object detection today. What are some of the applications of object detection?_____no_output_____
<code>
from IPython.display import YouTubeVideo
YouTubeVideo('MPU2HistivI', width=600, height=400)_____no_output_____
</code>
# Convolution & Pooling (Learn)
<a id="p1"></a>_____no_output_____## Overview
Like neural networks themselves, CNNs are inspired by biology - specifically, the receptive fields of the visual cortex.
Put roughly, in a real brain the neurons in the visual cortex *specialize* to be receptive to certain regions, shapes, colors, orientations, and other common visual features. In a sense, the very structure of our cognitive system transforms raw visual input, and sends it to neurons that specialize in handling particular subsets of it.
CNNs imitate this approach by applying a convolution. A convolution is an operation on two functions that produces a third function, showing how one function modifies another. Convolutions have a [variety of nice mathematical properties](https://en.wikipedia.org/wiki/Convolution#Properties) - commutativity, associativity, distributivity, and more. Applying a convolution effectively transforms the "shape" of the input.
One common confusion - the term "convolution" is used to refer to both the process of computing the third (joint) function and the process of applying it. In our context, it's more useful to think of it as an application, again loosely analogous to the mapping from visual field to receptive areas of the cortex in a real animal._____no_output_____
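As a minimal sketch of the idea (arrays chosen arbitrarily), a 1D convolution in NumPy produces the "third function" and is commutative:
<code>
# Minimal sketch: convolution of two 1D "functions" produces a third one.
# The arrays are arbitrary; np.convolve defaults to the full convolution.
import numpy as np

signal = np.array([0., 1., 2., 3., 2., 1., 0.])
kernel = np.array([1., 1., 1.]) / 3.0            # a 3-point moving average

smoothed = np.convolve(signal, kernel, mode="same")
print(smoothed)                                   # the "third function"

# Commutativity: swapping the two functions gives the same (full) result
print(np.allclose(np.convolve(signal, kernel), np.convolve(kernel, signal)))_____no_output_____
</code>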
<code>
from IPython.display import YouTubeVideo
YouTubeVideo('IOHayh06LJ4', width=600, height=400)_____no_output_____
</code>
## Follow Along
Let's try to do some convolutions in `Keras`._____no_output_____### Convolution - an example
Consider blurring an image - assume the image is represented as a matrix of numbers, where each number corresponds to the color value of a pixel._____no_output_____
<code>
import imageio
import matplotlib.pyplot as plt
from skimage import color, io
from skimage.exposure import rescale_intensity
austen = io.imread('https://dl.airtable.com/S1InFmIhQBypHBL0BICi_austen.jpg')
austen_grayscale = rescale_intensity(color.rgb2gray(austen))
austen_grayscale_____no_output_____plt.imshow(austen_grayscale, cmap="gray");_____no_output_____import scipy.ndimage as nd
import numpy as np
horizontal_edge_convolution = np.array([[1,1,1,1,1],
[0,0,0,0,0],
[0,0,0,0,0],
[0,0,0,0,0],
[-1,-1,-1,-1,-1]])
vertical_edge_convolution = np.array([[1, 0, 0, 0, -1],
[1, 0, 0, 0, -1],
[1, 0, 0, 0, -1],
[1, 0, 0, 0, -1],
[1, 0, 0, 0, -1]])
austen_edges = nd.convolve(austen_grayscale, vertical_edge_convolution)
#austen_edges_____no_output_____plt.imshow(austen_edges, cmap="gray");_____no_output_____
</code>
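The cell above applies an edge-detection kernel; as a minimal sketch of the blurring example mentioned earlier (assuming `austen_grayscale` from the previous cell is available), a simple box blur uses the same `nd.convolve` call:
<code>
# Minimal sketch: a 5x5 box blur with the same convolution machinery.
# Assumes `austen_grayscale` from the cell above is available.
import numpy as np
import scipy.ndimage as nd
import matplotlib.pyplot as plt

blur_kernel = np.ones((5, 5)) / 25.0   # each output pixel = mean of a 5x5 patch
austen_blurred = nd.convolve(austen_grayscale, blur_kernel)
plt.imshow(austen_blurred, cmap="gray");_____no_output_____
</code>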
## Challenge
You will be expected to be able to describe convolution. _____no_output_____# CNNs for Classification (Learn)_____no_output_____## Overview_____no_output_____### Typical CNN Architecture

The first stage of a CNN is, unsurprisingly, a convolution - specifically, a transformation that maps regions of the input image to neurons responsible for receiving them. The convolutional layer can be visualized as follows:

The red represents the original input image, and the blue the neurons that correspond.
As shown in the first image, a CNN can have multiple rounds of convolutions, [downsampling](https://en.wikipedia.org/wiki/Downsampling_(signal_processing)) (a digital signal processing technique that effectively reduces the information by passing through a filter), and then eventually a fully connected neural network and output layer. Typical output layers for a CNN would be oriented towards classification or detection problems - e.g. "does this picture contain a cat, a dog, or some other animal?"
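As a minimal sketch of the downsampling step, here is a toy 2x2 max pooling in plain NumPy (the feature map values are arbitrary):
<code>
# Minimal sketch: 2x2 max pooling on a toy 4x4 "feature map" (arbitrary values).
import numpy as np

feature_map = np.array([[1, 3, 2, 0],
                        [4, 6, 1, 1],
                        [0, 2, 5, 7],
                        [1, 1, 8, 2]])

# Split into non-overlapping 2x2 blocks and keep the max of each block
pooled = feature_map.reshape(2, 2, 2, 2).max(axis=(1, 3))
print(pooled)   # -> [[6 2]
                #     [2 8]]_____no_output_____
</code>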
Why are CNNs so popular?
1. Compared to prior image learning techniques, they require relatively little image preprocessing (cropping/centering, normalizing, etc.)
2. Relatedly, they are *robust* to all sorts of common problems in images (shifts, lighting, etc.)
Actually training a cutting edge image classification CNN is nontrivial computationally - the good news is, with transfer learning, we can get one "off-the-shelf"!_____no_output_____## Follow Along_____no_output_____
<code>
from tensorflow.keras import datasets
from tensorflow.keras.models import Sequential, Model # <- May Use
from tensorflow.keras.layers import Dense, Conv2D, MaxPooling2D, Flatten
import matplotlib.pyplot as plt_____no_output_____(train_images, train_labels), (test_images, test_labels) = datasets.cifar10.load_data()
# Normalize pixel values to be between 0 and 1
train_images, test_images = train_images / 255.0, test_images / 255.0
Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
170500096/170498071 [==============================] - 41s 0us/step
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
'dog', 'frog', 'horse', 'ship', 'truck']
plt.figure(figsize=(10,10))
for i in range(25):
plt.subplot(5,5,i+1)
plt.xticks([])
plt.yticks([])
plt.grid(False)
plt.imshow(train_images[i], cmap=plt.cm.binary)
# The CIFAR labels happen to be arrays,
# which is why you need the extra index
plt.xlabel(class_names[train_labels[i][0]])
plt.show()_____no_output_____train_images[0].shape_____no_output_____# Setup Architecture
model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)))
model.add(MaxPooling2D((2,2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dense(10, activation='softmax'))
model.summary()
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_9 (Conv2D) (None, 30, 30, 32) 896
_________________________________________________________________
max_pooling2d_7 (MaxPooling2 (None, 15, 15, 32) 0
_________________________________________________________________
conv2d_10 (Conv2D) (None, 13, 13, 64) 18496
_________________________________________________________________
max_pooling2d_8 (MaxPooling2 (None, 6, 6, 64) 0
_________________________________________________________________
conv2d_11 (Conv2D) (None, 4, 4, 64) 36928
_________________________________________________________________
flatten_1 (Flatten) (None, 1024) 0
_________________________________________________________________
dense (Dense) (None, 64) 65600
_________________________________________________________________
dense_1 (Dense) (None, 10) 650
=================================================================
Total params: 122,570
Trainable params: 122,570
Non-trainable params: 0
_________________________________________________________________
# Compile Model
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])_____no_output_____# Fit Model
model.fit(train_images, train_labels, epochs=10, validation_data=(test_images, test_labels));
Train on 50000 samples, validate on 10000 samples
Epoch 1/10
50000/50000 [==============================] - 465s 9ms/sample - loss: 1.5302 - acc: 0.4419 - val_loss: 1.2764 - val_acc: 0.5435
Epoch 2/10
50000/50000 [==============================] - 484s 10ms/sample - loss: 1.1523 - acc: 0.5943 - val_loss: 1.0657 - val_acc: 0.6253
Epoch 3/10
50000/50000 [==============================] - 445s 9ms/sample - loss: 0.9975 - acc: 0.6495 - val_loss: 0.9984 - val_acc: 0.6472
Epoch 4/10
50000/50000 [==============================] - 437s 9ms/sample - loss: 0.8981 - acc: 0.6852 - val_loss: 0.9317 - val_acc: 0.6698
Epoch 5/10
50000/50000 [==============================] - 461s 9ms/sample - loss: 0.8207 - acc: 0.7126 - val_loss: 0.8993 - val_acc: 0.6894
Epoch 6/10
50000/50000 [==============================] - 479s 10ms/sample - loss: 0.7669 - acc: 0.7301 - val_loss: 0.9009 - val_acc: 0.6917
Epoch 7/10
50000/50000 [==============================] - 428s 9ms/sample - loss: 0.7121 - acc: 0.7501 - val_loss: 0.8806 - val_acc: 0.6979
Epoch 8/10
50000/50000 [==============================] - 404s 8ms/sample - loss: 0.6663 - acc: 0.7652 - val_loss: 0.8891 - val_acc: 0.6980
Epoch 9/10
50000/50000 [==============================] - 400s 8ms/sample - loss: 0.6314 - acc: 0.7769 - val_loss: 0.9083 - val_acc: 0.6970
Epoch 10/10
50000/50000 [==============================] - 401s 8ms/sample - loss: 0.5920 - acc: 0.7937 - val_loss: 0.9412 - val_acc: 0.7020
# Evaluate Model
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
 - 25s - loss: 0.9412 - acc: 0.7020
</code>
## Challenge
You will apply CNNs to a classification task in the module project._____no_output_____# CNNs for Object Detection (Learn)_____no_output_____## Overview_____no_output_____### Transfer Learning - TensorFlow Hub
"A library for reusable machine learning modules"
This lets you quickly take advantage of a model that was trained with thousands of GPU hours. It also enables transfer learning - reusing a part of a trained model (called a module) that includes weights and assets, but also training the overall model some yourself with your own data. The advantages are fairly clear - you can use less training data, have faster training, and have a model that generalizes better.
https://www.tensorflow.org/hub/
**WARNING** - Dragons ahead!

TensorFlow Hub is very bleeding edge, and while there's a good amount of documentation out there, it's not always updated or consistent. You'll have to use your problem-solving skills if you want to use it!_____no_output_____## Follow Along_____no_output_____
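Before the object-detection example below, here is a minimal transfer-learning sketch using `keras.applications` (rather than TF Hub itself); the head layers and class count are arbitrary choices for illustration:
<code>
# Minimal sketch: reuse a pretrained ResNet50 as a frozen feature extractor
# and train only a small classification head (layer sizes are arbitrary).
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.models import Sequential

base = ResNet50(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
base.trainable = False                      # keep the pretrained weights fixed

model = Sequential([
    base,
    GlobalAveragePooling2D(),
    Dense(64, activation='relu'),
    Dense(10, activation='softmax')         # e.g. 10 target classes
])
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
# model.fit(...) would then train only the new head on your own data_____no_output_____
</code>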
<code>
import numpy as np
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions
def process_img_path(img_path):
return image.load_img(img_path, target_size=(224, 224))
def img_contains_banana(img):
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)
model = ResNet50(weights='imagenet')
features = model.predict(x)
results = decode_predictions(features, top=3)[0]
print(results)
for entry in results:
if entry[1] == 'banana':
return entry[2]
return 0.0_____no_output_____import requests
image_urls = ["https://github.com/LambdaSchool/ML-YouOnlyLookOnce/raw/master/sample_data/negative_examples/example11.jpeg",
"https://github.com/LambdaSchool/ML-YouOnlyLookOnce/raw/master/sample_data/positive_examples/example0.jpeg"]
for _id,img in enumerate(image_urls):
r = requests.get(img)
with open(f'example{_id}.jpg', 'wb') as f:
f.write(r.content)_____no_output_____from IPython.display import Image
Image(filename='./example0.jpg', width=600)_____no_output_____img_contains_banana(process_img_path('example0.jpg'))WARNING:tensorflow:From C:\Users\Samue\Anaconda3\envs\U4-S3-DNN\lib\site-packages\tensorflow\python\ops\resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.
[('n04037443', 'racer', 0.91754997), ('n04285008', 'sports_car', 0.07783192), ('n04461696', 'tow_truck', 0.0023059668)]
Image(filename='example1.jpg', width=600)_____no_output_____img_contains_banana(process_img_path('example1.jpg'))
[('n07753592', 'banana', 0.06643853), ('n03532672', 'hook', 0.06110267), ('n03498962', 'hatchet', 0.05880436)]
</code>
Notice that, while it gets it right, the confidence for the banana image is fairly low. That's because so much of the image is "not-banana"! How can this be improved? Bounding boxes to center on items of interest._____no_output_____## Challenge
You will be expected to apply a pretrained model to a classification problem today. _____no_output_____# Review
- <a href="#p1">Part 1: </a>Describe convolution and pooling
* A Convolution is a function applied to another function to produce a third function
* Convolutional Kernels are typically 'learned' during the process of training a Convolution Neural Network
* Pooling is a dimensionality reduction technique that uses either the max or average of a feature map region to downsample data
- <a href="#p2">Part 2: </a>Apply a convolutional neural network to a classification task
* Keras has layers for convolutions :)
- <a href="#p3">Part 3: </a>Use a pre-trained convolution neural network for object detection
* Check out the pretrained models available in both Keras & TensorFlow Hub_____no_output_____# Sources
- _Deep Learning_, Goodfellow *et al.*
- [Keras CNN Tutorial](https://www.tensorflow.org/tutorials/images/cnn)
- [Tensorflow + Keras](https://www.tensorflow.org/api_docs/python/tf/keras/layers/Conv2D)
- [Convolution Wiki](https://en.wikipedia.org/wiki/Convolution)
- [Keras Conv2D: Working with CNN 2D Convolutions in Keras](https://missinglink.ai/guides/keras/keras-conv2d-working-cnn-2d-convolutions-keras/)_____no_output_____
{
"repository": "SamH3pn3r/DS-Unit-4-Sprint-3-Deep-Learning",
"path": "module2-convolutional-neural-networks/LS_DS_432_Convolutional_Neural_Networks_Lecture.ipynb",
"matched_keywords": [
"biology"
],
"stars": null,
"size": 814670,
"hexsha": "d084e2f49b78002b5a58c6d00cf1f11a12ac9042",
"max_line_length": 315293,
"avg_line_length": 974.485645933,
"alphanum_fraction": 0.9551118858
}
# Notebook from brit228/AB-Demo
Path: module2-Bag-of-Words/LS_DS_422_BOW_Assignment.ipynb
<a href="https://colab.research.google.com/github/brit228/AB-Demo/blob/master/module2-Bag-of-Words/LS_DS_422_BOW_Assignment.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____
<code>
import re
import string
!pip install -U nltk
import nltk
nltk.download('punkt')
nltk.download('stopwords')
nltk.download('wordnet')
from nltk.tokenize import sent_tokenize # Sentence Tokenizer
from nltk.tokenize import word_tokenize # Word Tokenizer
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.probability import FreqDist
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
Requirement already up-to-date: nltk in /usr/local/lib/python3.6/dist-packages (3.4)
Requirement already satisfied, skipping upgrade: singledispatch in /usr/local/lib/python3.6/dist-packages (from nltk) (3.4.0.3)
Requirement already satisfied, skipping upgrade: six in /usr/local/lib/python3.6/dist-packages (from nltk) (1.11.0)
</code>
# 1) (optional) Scrape 100 Job Listings that contain the title "Data Scientist" from indeed.com
At a minimum your final dataframe of job listings should contain
- Job Title
- Job Description_____no_output_____
<code>
import requests
from bs4 import BeautifulSoup
addition = ""
i = 0
data = []
while True:
r = requests.get("https://www.indeed.com/jobs?q=data%20scientist&l=Boston,%20MA"+addition)
soup = BeautifulSoup(r.text, 'html.parser')
for card in soup.find_all('div', class_="jobsearch-SerpJobCard", attrs={"data-tn-component": "organicJob"}):
try:
d = {}
d["Job Title"] = card.h2.a.text
d["Company"] = card.find("span", class_="company").text.strip()
d["Location"] = card.find("span", class_="location").text.strip()
r2 = requests.get("https://www.indeed.com"+card.a["href"])
soup2 = BeautifulSoup(r2.text, 'html.parser')
d["Job Description"] = "\n".join([a.text for a in soup2.find("div", class_="jobsearch-JobComponent-description icl-u-xs-mt--md").contents])
data.append(d)
except:
pass
i += 10
print(i)
addition = "&start={}".format(i)
if len(data) > 100:
break
df = pd.DataFrame(data)
df
Who we are
Massachusetts Cannabis Research Laboratories (MCR Labs) is a rapidly growing, independent, and accredited cannabis testing and analytics laboratory.
We are a team of researchers, analytical chemists, pharmaceutical scientists, technologists, and cannabis enthusiasts working to advance the medical and recreational cannabis industries by helping providers ensure the safety and effectiveness of their products. We are committed to proving the best possible services to our clients, building relationships and supporting our community.
This is an exceptional career opportunity to join the team of a fast-paced company in a high-growth industry!
Job Summary
This position is responsible for the production of reports, extracting and analyzing output from internal software tool for client testing reports, and performing internal trend analysis.
Primary Job Responsibilities:
The position will be responsible for creating and running various reports from the internal software. Review and ensure all information on the reports is accurate and correct. Identify opportunities for process improvements in reporting and development, while providing value through insightful analytics and problem-solving. The reporting data analyst will be responsible for identifying data trends and performing data analysis. Timely and effectively communicate with coworkers and other internal departments any changes affecting client’s product. Present findings and make recommendations. Meet client deadlines for ad hoc reports and other requested information. In this dynamic role, the individual will coordinate reporting and act as the liaison with the external customers and internal team. Understand the purpose and content of requested reports, utilizing systems to create routine reports to meet clients’ needs, and audit and analyze data to verify quality and data integrity. Identify and research data anomalies.
Primary Goal
The primary goal of the Reporting Data Analyst is to prepare accurate client analytical testing reports. Review all reports for accuracy before sending to the client. Analyze and develop recommendations on cannabis laboratory reporting processes.
About You
Preferred Skills and Qualifications
BS/BA degree in chemistry, computer science, physics, mathematics or a related fieldExperience working in a labKnowledge of knowledge of Chemistry, specifically ChromatographyKnowledge of Mass SpectrometryAbility to perform repetitive tasks with a high level of successVery high level of organization and attention to detail with a sense of self-directionAbility to read, analyze, and interpret technical dataWillingness to work and learnAbility to adapt and learn quicklyDesire to learn about product and industryHave advanced knowledge of Excel, macros, and functions to enhance current reportingStrong computer skillsAbility to work independently and communicate effectively in a team environmentEffective time management skills – this is a fast-paced environment and the successful candidate will be able to multi-task and change focus very quickly while maintaining their ability to be effective
Do you have what it takes to be a part of an industry leading cannabis testing company?
Come join the Green Rush!
Job Type: Full-time Job Location: Framingham, MA 01701
Required education: Bachelor's
Additional Requirements: Ability to work on weekends, must be fluent in English and authorized to work in US.
MCR Labs is an equal opportunity employer and we value diversity at our company. We do not discriminate on the basis of race, religion, color, national origin, gender, sexual orientation, age, marital status, genetic information, veteran status, or disability status.
All qualified applicants must be able to satisfy the Department of Homeland Security Form I-9 requirement, with the documents necessary to verify identity & employment authorization.
Job Type: Full-time
Experience:
Data Analysis: 1 year (Required)Excel: 2 years (Required)Chromatography: 1 year (Required)
Education:
Bachelor's (Required)
Benefits offered:
Paid time offHealth insuranceDental insurance
$68,809 a year
TO APPLY:
Interested candidates must fully complete the online MSO application and attach a cover letter and resume through the online application process. The online application process can be found under the ‘Employment’ tab at www.middlesexsheriff.com.
The Middlesex Sheriff’s Office (MSO) is seeking a qualified professional for the position of Data Integration Specialist / Research Analyst to work in the Administrative Offices of the MSO located in Medford, MA. The Data Integration Specialist / Research Analyst will work directly with individual departments and administration to coordinate, streamline, and visualize an array of data being gathered at the MSO. The Data Integration Specialist / Research Analyst will also be responsible for gathering, linking, and cleaning data that will be used for reporting requirements. The Data Integration Specialist / Research Analyst will also work with the Chief Financial Officer and Director of Fiscal Operations on grant funded projects, reporting requirements, and other budget-related initiatives. This is an exempt, full time, at-will employment position that reports to the Director of Fiscal Operations and Chief Financial Officer.
Additional responsibilities include, but are not limited to, the following:
Collecting, analyzing, interpreting and presenting quantitative and qualitative information;
Designing and managing experiments, surveys and data collection;
Designing data warehouse/reporting schemas that advance the MSO’s reporting needs;
Collaborating with full-time and contractual staff to manage hardware and software systems (SQL, Excel);
Providing analysis of data obtained in context, looking for patterns and forecasting trends to help make recommendations for funding and policy changes;
Working with MSO staff to ensure that complex statistical concepts are explained efficiently and accurately;
Preparing materials for submission to granting agencies and foundations.
Working with other internal departments and outside public safety agencies to compile data and provide data visualizations for presentations; do one-off and repeatable Extract Transfer Load (ETL) work to support the MSO;
Building and managing ETL processes for internal departments;
Working closely with data analysts to build data integrations that quickly and effectively solve reporting needs;
Other work duties as assigned by Director of Fiscal Operations and Chief Financial Officer.
The individual selected for the position of Data Integration Specialist / Research Analyst must have the ability to exercise good judgment and focus on detail as required by the job. The individual selected shall also be cognizant of the confidential and sensitive nature of working in a law enforcement agency and must comply with all institutional rules regarding safety and security.
Qualifications:
Bachelor's degree in Accounting/Finance, Business/Public Administration or a closely related field preferred;
Proven ability to gather, synthesize, and present current research and trends in criminal justice, healthcare, and social science.
Mandatory proficiencies should include: Microsoft Word; Excel; Access; SQL query and data transformation skills, including development of Dashboards, Crystal Reports, and MS Access-based Reports. Microsoft SQL Server expertise a plus.
Strong knowledge of industry research methodologies and techniques.
Ability to work independently, as well as, and with other internal departments in a fast paced environment and execute close attention to detail;
Strong written, communication, organizational, analytical, problem-solving and time-management skills;
Ability to complete multiple projects in a timely and accurate manner while maintaining comprehensive and cohesive records.
In compliance with federal law, all persons hired will be required to verify identity and eligibility to work in the United States and to complete the required employment eligibility verification form upon hire. Must be a Massachusetts resident, have an active valid driver’s license and the right to legally operate a motor vehicle in Massachusetts and pass a criminal background check.
Salary: Starting annual base salary from $68,809 plus additional benefits.
Submissions must be received by Friday, March 22, 2019 at 4:00p.m.
The Middlesex Sheriff’s Office is an equal opportunity employer
Job Type: Full-time
Education:
Bachelor's (Preferred)
License:
active valid driver’s (Required)
Due to BitSight’s rapid growth, and our need to better understand and glean insights from our marketing data, we are recruiting a data scientist to join the marketing team. This is a new role in the marketing department that you will help shape. You will also have the opportunity to work with BitSight’s Security Data Science team to stay on the bleeding edge of data science/AI with a bi-weekly internal reading group, technology-specific workshops, and conference attendance.
Responsibilities
Look for signals in our large data sets and build models that leverage these signals to glean insights from our marketing and customer data.
Determine the buyer’s journey from first touch on the website all the way through retention and upsell/cross-sell. Make recommendations on what changes to make to content and programs across the funnel from awareness to consideration to selection to upsell to drive revenue growth.
Lead machine learning projects using varied advanced analytical techniques for estimating current and future customer engagement, prospecting new customers, cross-selling to existing customers (response models), and identifying customers likely to leave.
Develop and test multiple hypotheses using results from analyses to generate and answer new questions leading to more in-depth understanding of customer behavior being studied.
Collaborate with internal business partners across departments to integrate analytical tools and solutions into demand generation campaigns/initiatives.
Identify potential issues that arise during the course of research projects and escalate and communicate these to management and internal clients for awareness/ action/ resolution.
Make presentations of status and results of analysis to non-technical audience of various levels.
Projects are typically short, fast moving, and highly varied. You will need to manage multiple research projects under tight deadlines, as well as ensure research projects are completed on time and meet internal client expectations.
Desirable Skills
Strong coding skills: You are able to read in a data set, process it, and plot data. You can build modules that encapsulate functionality you have developed. You have some experience with Big Data tools, e.g., Spark, PrestoDB, Hive. We are a Python house and you need to know Python. Also important is the ability to quickly learn to use tools and packages new to you.
Strong statistical analysis skills: You are able to take a data set, estimate statistical parameters from it, and compare the results with existing data sets. You are comfortable with and knowledgeable about statistical concepts like p-values, hypothesis testing, and non-normal distributions. Again, of utmost importance is being comfortable learning new statistical methods.
Solid machine learning and statistical modeling skills: You are able to frame problems as estimating a target variable, build a dataset, define an evaluation metric, build a non-trivial baseline, and finally use appropriate techniques to beat the baseline when possible.
Strong team working skills: You’ll be able to work well with other departments (e.g. data science, product management, customer success), balance requirements, and work independently.
Ability to communicate effectively: Good results are a good starting point but we also need to communicate these results verbally, in writing, and visually.
Job Benefits
BitSight prides itself in building exceptional career opportunities and offering outstanding benefits to our team. In that regard, BitSight is not your average company. We have the enthusiasm of a start-up, a culture driven from industry veterans committed to long-term growth, and the benefits package of a mature industry leader. BitSight is a great place to work.
The purpose of this role is to partner with the respective Franchise analytics teams to ensure high quality analytic insights, recommendations, and data usage for the supported therapeutic area. Key responsibilities include: Analytics Execute analytics to support Multi-Channel, Patient, Payer/Provider, Life Cycle, Field Force, and Forecasting Determine the ideal methodology to apply for each analysis or process based on data availability and limitations Develop, validate and deploy predictive and diagnostic solutions using reusable code and computing paradigms Derive insights and recommendations from research and analyses that address both stated and unstated business questions Use statistical approaches, such as ANOVA, etc., leveraging statistical analysis toolsets, such as R, SPSS and SAS Process and analyze large health-related datasets ranging from small to Big Data and integrate and analyze Structured, Semi-structured, and Unstructured data Use tools for accessing, synthesizing, analyzing and reporting data Data Work with database technologies, SQL, NLP, data engineering, Hadoop Zoo, Kibana, visualization tools, graph analysis Collaboration Act as SME resource for broader Advanced Analytics community within supported therapeutic areas Foster common data-driven viewpoints between stakeholders with divergent views and objectives Connect technical and data skills to business needs to identify improvements with the project lifecycle Work with peers to ensure that resulting code is compliant and supports standardization whenever possible Proactively deliver analyses and reports based on timing for key planning processes Project Management Meet Franchise Leadership expectations by delivering work on time and within scope Fulfill assigned role within a project team delivering to expectations Ensure Takeda ethics and compliance are continuously met Responsibilities Job Function and Description 80% Deliver recommendations built on models and analytics 20% Develop data environment Education and Experience Requirements BA/BS degree in business, life sciences, or related technical discipline Master’s in statistics, mathematics, computer science, applied economics, computational biology, computational informatics, or medical informatics preferred 5+ years’ of relevant analytical or healthcare experience preferred Preferred Programming skills: Uses Statistical Modelling: e.g. SPSS, SAS, R Machine Learning Tools: e.g. Spark Visualization: e.g. QlikView/QlikSense, Tableau Data Environment: e.g. Datameer (Hadoop) Experience with large scale database applications (e.g., Oracle, Hadoop, Teradata) Experience detailing Big Data environment requirements Familiar with advanced data science methods Demonstrated proficiency with statistical methods, e.g. ANCOVA, two-tailed p-test, descriptive statistics, etc. 
Key Skills, Abilities, and Competencies Technical Skills Practiced skills in creating appropriate logic that answers stated and unstated business questions In-depth analytical and critical thinking skills to resolve issues efficiently and effectively Experienced in selecting and applying the appropriate methodology based on business need and data Demonstrable comfort using and applying structured statistical modeling tools and additional analytical modules or add-ons Expertise in writing reusable code to customize statistical models Business Acumen Understanding of goals and needs of supported functions Able to identify stakeholder needs through voice of customer and relevant data collection Experience in generating insights and recommendations from research and analyses that address both stated and unstated business questions Interpersonal Skills Experienced in presenting insights and conclusions from complex information in an easy to understand way Able to maintain an impartial point of view Builds marketing and sales leadership’s confidence through active listening, asking questions and accurately paraphrasing their needs and expectations Proactively engages with stakeholders to build relationships. Recognizes the need to modify communication styles to fit diverse audiences with various levels of expertise Fulfills assigned role within a project team delivering to expectations Complexity and Problem Solving Technical, data and analysis related decisions are within the incumbent’s authority. For business and stakeholder related decision he/she consults with the Advanced Analytics Lead Internal and External Contacts Internal Business Partner Contacts Advanced Analytics Community in Franchises U.S. Commercial Operations functions (Sales Operations, Marketing Operations, Learning and Development) IT, Regulatory, Medical, Compliance External Vendor Contracts Service Providers / Consultants Technology Solution Implementation Vendors Software Vendors Other Job Requirements 10% domestic travel may be required.
Notice to Employment / Recruitment Agents:
Employment / Recruitment agents may only submit candidates for vacancies only if they have written authorization to do so from Shire, a wholly-owned subsidiary of Takeda’s Talent Acquisition department. Any agency candidate submission may only be submitted to positions opened to the agency through the specific Agency Portal. Shire, a wholly-owned subsidiary of Takeda will only pay a fee for candidates submitted or presented where there is a fully executed contract in place between the Employment / Recruitment agents and Shire, a wholly-owned subsidiary of Takeda and only if the candidate is submitted via the Agency Portal. Candidates submitted or presented by Employment / Recruitment Agents without a fully executed contract or submitted through this site shall not be deemed to form part of any Engagement for which the Agency may claim remuneration.
Equal Employment Opportunity
Shire, a wholly-owned subsidiary of Takeda, is an Equal Opportunity Employer committed to a diverse workforce. Shire, a wholly-owned subsidiary of Takeda, will not discriminate against any worker or job applicant on the basis of race, color, religion, gender, national origin, ancestry, age, sexual orientation, marital or civil partnership status, pregnancy, gender reassignment, non-job related mental or physical disability, genetic information, veteran status, military service, application for military service, or membership in any other category protected under law.
EEO is the Law - https://www.dol.gov/ofccp/regs/compliance/posters/pdf/eeopost.pdf
EEO is the Law – Supplement - https://www.dol.gov/ofccp/regs/compliance/posters/pdf/OFCCP_EEO_Supplement_Final_JRF_QA_508c.pdf
Pay Transparency Policy - https://www.dol.gov/ofccp/pdf/pay-transp_formattedESQA508c.pdf
Reasonable Accommodations
Shire, a wholly-owned subsidiary of Takeda, is committed to working with and providing reasonable accommodation to individuals with disabilities. If, because of a medical condition or disability, you need a reasonable accommodation for any part of the application process, or in order to perform the essential functions of a position, please call 484-595-8400 and let us know the nature of your request and your contact information.
Description:
Are you passionate about applying data science to real business and customer needs? Would you like to use your data science skills to help our customers do more, feel more, and be more? At Bose, all of our energy is aimed at bringing products into the world that people truly love, and we don’t stop until the details are just right. Data science, machine learning, and analytics have become a crucial part of this mission . These capabilities fuel the creation of new and innovative products in consumer electronics and wellness, help us to bring the right products to the right customers, and allow us to astonish customers with carefully crafted and personalized experiences.
We are looking for a bright, enthusiastic data scientist for our new and growing Global Consumer Sales Data Science team out of the Boston Landing location. The mission of this team is to develop world-class data science, machine learning, and related technologies to extract insights from data for driving business and customer value. We provide data science expertise and support across the Sales, Marketing, Retail, and Customer Service organizations. The desired outcomes will include improved customer experiences, personalized recommendations, and digital optimization.
Responsibilities:
Develop and evaluate predictive and prescriptive models for marketing, sales, e-commerce, and customer service applications such as customer lifetime value models, product recommenders, customer segmentations, uplift models, and propensity models.
Explore large datasets related to customer and user behavior using modeling, analysis, and visualization techniques.
Apply frequentist and Bayesian statistical inference tools to experimental and observational data.
Collaborate with data science, data engineering, and data governance teams throughout the data science process. Engage with Global Consumer Sales colleagues to understand business problems and define data science solutions.
Communicate results, analyses, and methodologies to technical and business stakeholders.
Travel to Framingham, MA location at least once per week (shuttle from Boston Landing is available) .
Education:
BS or MS (preferred) in Data Science, Computer Science, Business Analytics, Statistics, or a related field
Completed coursework related to Statistics, Computer Science, Machine Learning, and Data Science
Completed coursework related to Business/Customer Analytics, Marketing, Sales, and/or Management
Skills:
2+ years of experience applying data science, machine learning, and analytics techniques to business problems, preferably related to sales and marketing
Strong programming background with experience in Python (preferred) or R
Strong understanding of unsupervised and supervised machine learning algorithms
Experience designing experiments and analyzing experimental data using statistical modeling
Strong analytical, communication, collaboration, and problem - solving skills
Experience cleaning and wrangling data using tools such as SQL and Python
(Preferred) Experience working with big data tools and frameworks such as Hadoop and Apache Spark
Bose is an equal opportunity employer that is committed to inclusion and diversity. We evaluate qualified applicants without regard to race, color, religion, sex, sexual orientation, gender identity, genetic information, national origin, age, disability, veteran status, or any other legally protected characteristics. For additional information, please review: (1) the EEO is the Law Poster (http://www.dol.gov/ofccp/regs/compliance/posters/pdf/OFCCP_EEO_Supplement_Final_JRF_QA_508c.pdf); and (2) its Supplements (http://www.dol.gov/ofccp/regs/compliance/posters/ofccpost.htm). Please note, the company's pay transparency is available at http://www.dol.gov/ofccp/pdf/EO13665_PrescribedNondiscriminationPostingLanguage_JRFQA508c.pdf. Bose is committed to working with and providing reasonable accommodations to individuals with disabilities. If you need a reasonable accommodation because of a disability for any part of the application or employment process, please send an e-mail to [email protected] and let us know the nature of your request and your contact information.
150
We are seeking a highly motivated Data Scientist for computational analysis of complex data across our R&D portfolio to advance Preclinical and Development-stage programs. The successful candidate will serve as a subject matter expert who will provide cross-functional guidance and support internally and externally. He/she will thrive in a fast-paced, highly-collaborative environment to advance program goals with deep expertise in developing models using multi-dimensional data sources.
Job Responsibilities:
Provide scientific input and leadership to enable the team to analyze complex data (e.g. genomics, transcriptomics and proteomics) from patient samples. Lead efforts to develop predictive models using statistical and computational biology approaches for biomarker development and patient stratification strategies.
Provide immediate support in multiple R&D-stage programs and to cross-functional clinical development teams through integrated analysis of clinical and biomarker data generated from multiple platforms and formats.
Pro-actively define statistical analysis plans to generate actionable results for meeting program and business objectives.
Communication and visualization of results to scientific and non-scientific audiences.
Proactively partner with core R&D functional leads to advance Clinical, Preclinical and Discovery program objectives and serve as internal expert in computational biology and biostatistical modeling.
Identify key scientific questions to advance our scientific understanding across the portfolio.
Adapt latest methods and tools for analyzing large omics datasets (genomics/proteomics).
Establish internal best practices for complex data visualization, integration, and accessibility.
Education and Experience:
Ph.D. in statistics, mathematics, bioinformatics, computational biology, genomics, computer science, or a related field with 5+ years of experience in complex, quantitative data analysis in a biotech/biopharm environment.
Experience working with big-data generated by diverse platforms (e.g. RNA-Seq, Flow Cytometry, multiplexed proteomics) and accessing and mining external datasets.
Experience working with clinical study data and compiling reports in a GxP-environment.
Ability to present and visualize data for communicating with scientific and non-scientific colleagues.
Proficiency in common programming languages such as Python, R, Matlab, Java, Shell and Linux environments.
Ability to work independently and collaboratively in highly dynamic, fast-paced projects within a highly-matrixed, cross-functional and collaborative environment.
Data Science Team
Nift brings new customers through the doors of neighborhood businesses better than anything else out there. Join the team that’s giving millions of people gifts they love while bringing in the foot traffic that makes neighborhood businesses thrive.
Data Science is the heart and core of our company. Algorithms and models are the foundation upon which our product is built, with data driving our key decisions, testing and growth. Our Chief Scientist, David C Parkes, is the former Area Dean of Computer Science at Harvard, the founder of Harvard’s EconCS Group, and the Co-Director of Harvard's Data Science Initiative. Our data doubles every two months with even more sources waiting to be added. Our product represents a completely new kind of marketplace and the science around it has yet to be defined. We’re looking for a Data Analyst to join our core team and drive growth and revenue.
This position is based in Boston, MA.
Examples of projects we currently need help with:
Analyze real-time data market economic data
Evaluate experimental results - On simulated data, real-time data and concurrent AB tests.
Collect data, build analysis and present it to monitor and understand the company revenue.
Estimate the demand in multiple local markets (in different sizes, locations and development stages).
Segment customers and businesses; offer insights and identify revenue growth opportunities within existing core verticals and new ones.
Monitor marketplace metrics and system performance and suggest corrective actions when needed.
We have an outstanding core team with deep understanding of algorithmic economics and data science. Our work is highly sought-after and is critical to the success of our business. If you have a proven track-record, want to make an impact and you get excited about the prospects of being part of something really special, we should talk.
Traits we value:
Solid understanding of statistics, economics, and math.
A keen eye for detail and thoughtful investigation of data.
A steadfast focus on creating impactful change and ability to prioritize between many tasks to maximize the improvement of the business.
2+ years of commercial experience is a plus.
A minimum of a Bachelor’s degree, a Master’s degree is preferred.
Collaborative team player who values the contribution of others.
We believe it's time technology starts working for Main Street's small businesses. Launched in Boston in the Summer of 2016, our start-up is helping millions of people discover great local businesses.
Klaviyo is looking for data scientists to analyze large data sets (we’re collecting billions of individual actions every month), build models and ship products that enable businesses to grow faster and communicate with their customers. Our background as a team is building these models for the Fortune 50 and we want to democratize and open up that technology to everyone.
The ideal candidate has a background in data science, statistics and machine learning and has done work ranging from exploratory analysis to training and deploying models. We use a wide variety of data mining and machine learning algorithms. The right candidate will have both a solid fundamental understanding and deep practical experience with at least a few modeling and machine learning techniques.
Our goal is always to match the right assumptions and models to the right problem - they don’t necessarily have to be complex. You should have experience building models that are used by people to make better decisions. We’re focused on shipping early and often. We prefer iterative solutions that are incrementally better to the perfect solution. You should also be able to measure and know what impact your models had on the decisions people made - e.g. did they outperform the previous best model or a human decision maker?
Our data science team is still in its early days and you’ll have a big impact on our direction and how we operate. You’ll be central to upfront research and shipping products that help our customers learn and grow from their data.
You:
Have a strong fundamental understanding and deep experience with at least a few machine learning algorithms (e.g. regressions, decision trees, k-means clustering, neural networks).
Understand Bayesian modeling techniques.
Are capable of analyzing data and making rigorous statements about what can or cannot be concluded.
Have experience designing and implementing model performance/validation assessments.
Have a background in statistics and understand different distributions and the conditions under which they’re valid.
Know how to code and have used data science tools and packages.
Have demonstrated a measurable impact based on the models you’ve created. It’s not always easy getting a model correct and we love talking about places we got stuck and working as a team to think through ideas that could unblock us.
Have a desire to ship features powered by data science (in other words, you’re excited by both upfront research and actually getting models into production at cloud scale).
You Have:
Bachelor’s or advanced degree in statistics, applied mathematics, computer science or other relevant quantitative discipline, or equivalent industry experience.
Have worked in a data science role for 5+ years professionally or academically and can talk about your projects and the techniques you used.
About Us
Klaviyo is a team of people who are crazy motivated by growth.
It’s what we help our customers do: grow their businesses by making it possible and easy for them to use their data to power better marketing.
It’s how we behave as individuals: we’re all deeply passionate about learning.
It’s how we manage our business: we have thousands of paying customers, we’re profitable, and we’re growing insanely fast.
And it’s what our culture is all about. Working at Klaviyo means you’ll work on things you never imagined you would; you’ll grow in ways you didn’t consider possible; and you’ll do the best work of your career with people who are just as motivated and talented as you are.
If this sounds like your ideal place to work, drop us a note!
Under supervision, and within established departmental and hospital policies and procedures, is responsible for performing a variety of general and technical duties to support the research activities within the Translational Neuroimmunology Research Center (TNRC) in the Ann Romney Center for Neurologic Diseases (ARCND). The data analyst is to provide expertise to acquire, manage, manipulate, analyze data, and report the results. Data capture and reporting in collaboration with others will be a daily activity.
Qualifications
PRINCIPAL DUTIES:
a. To assist in data analysis related to the research of Multiple Sclerosis and other Autoimmune diseases
b. Assist with data processing, archiving, storage and computer analysis
c. Create analysis datasets from an Oracle database
d. Oversee data cleaning and manipulation
e. Perform statistical analysis utilizing SAS, STATA, or R
f. Identify, analyze and interpret trends or patterns in complex data sets
g. Perform data entry when needed or required
h. Develop graphs, reports and presentations of project results
i. In collaboration with others, develop and maintain databases and data systems necessary for projects and department functions.
PREFERRED QUALIFICATIONS:
Experience with REDCap
Experience with SAS, STATA or R
Programming Skills to execute queries in Oracle database and knowledge of SQL and XML
At least 2 years in a research setting looking at multiple sclerosis or neurological data
MPH in epidemiology, biostatistics or a related field
SKILLS/ABILITIES/COMPETENCIES REQUIRED:
Proficiency in Microsoft Office suite, including Excel and Access
Bachelor’s or master’s degree, preferably in Information Management, Healthcare Information, Computing, Mathematics, Statistics or related fields
Proficiency with statistics in order to communicate easily with other statisticians
WORKING CONDITIONS:
Office space environment and occasional clinical center exposure.
EEO Statement
Brigham and Women’s Hospital is an Equal Opportunity Employer. All qualified applicants will receive consideration for employment without regard to race, color, religion, creed, sex, sexual orientation, gender identity, national origin, ancestry, age, veteran status, disability unrelated to job requirements, genetic information, military service, or other protected status.
Primary Location: MA-Boston-BWH Longwood Medical Area
Work Locations: BWH Longwood Medical Area 75 Francis Street Boston 02115
Job: Business and Systems Analyst
Organization: Brigham & Women's Hospital(BWH)
Schedule: Full-time
Standard Hours: 40
Shift: Day Job
Employee Status: Regular
Recruiting Department: BWH Neurology
Job Posting: Mar 18, 2019
160
</code>
# 2) Use NLTK to tokenize / clean the listings _____no_output_____
<code>
from nltk.corpus import stopwords          # NLTK imports assumed already loaded earlier in the notebook; repeated here for completeness
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
df2 = df.copy()
stop_words = stopwords.words('english')
lemmatizer = WordNetLemmatizer()
# Tokenize each listing, keep alphabetic non-stopword tokens, and lemmatize them
df2["Job Description"] = df2["Job Description"].apply(lambda v: [lemmatizer.lemmatize(w) for w in word_tokenize(v) if w.isalpha() and w not in stop_words])
# Sorted vocabulary of unique tokens across all listings
vector_list = sorted(list(set([inner for outer in df2["Job Description"].values for inner in outer])))
print(vector_list)['A', 'AART', 'AB', 'ACCESS', 'ACO', 'AD', 'AI', 'AIDS', 'ANALYST', 'AND', 'API', 'AWS', 'Abdul', 'Abilities', 'Ability', 'About', 'Absolutely', 'Acceleration', 'Access', 'Accommodation', 'Accommodations', 'Accountability', 'Accountable', 'Achieve', 'Acquisition', 'Act', 'Acting', 'Action', 'Actively', 'Activities', 'Actual', 'Actuarial', 'Acute', 'Additional', 'Additionally', 'Adhere', 'Administration', 'Advance', 'Advanced', 'Advice', 'Advisers', 'Advisory', 'Aetna', 'Affirmative', 'Affordability', 'Africa', 'African', 'After', 'Afternoon', 'Again', 'Agency', 'Aggregating', 'Agile', 'Agilent', 'Agility', 'Agios', 'Alaska', 'Algebra', 'Algorithm', 'Algorithms', 'Alion', 'All', 'Allocation', 'Alongside', 'Also', 'Alternatively', 'Amazon', 'America', 'American', 'Americans', 'An', 'Analyses', 'Analysis', 'Analyst', 'Analysts', 'Analytics', 'AnalyticsTM', 'Analyze', 'And', 'Apache', 'Applicants', 'Application', 'ApplicationClaims', 'Applications', 'Applied', 'Apply', 'Are', 'Area', 'Areas', 'Arthritis', 'Artificial', 'As', 'Asia', 'AspenTech', 'Assess', 'Asset', 'Assist', 'Assistance', 'Assistants', 'Assists', 'Associate', 'Association', 'AstraZeneca', 'At', 'Attend', 'Attractive', 'Attributes', 'August', 'Australia', 'Authenticity', 'Auto', 'Autoimmunity', 'Automate', 'Automation', 'Avenue', 'Award', 'Azar', 'Azure', 'B', 'BI', 'BLAS', 'BMC', 'BOP', 'BPT', 'BS', 'BWH', 'Bachelor', 'Bachelors', 'Back', 'Background', 'Banking', 'Based', 'Bash', 'Basic', 'Bayesian', 'Be', 'Beacon', 'Beam', 'Bedford', 'Begins', 'Benefits', 'BenefitsExcellent', 'Berkley', 'Big', 'Billing', 'Bioanalyzer', 'Biochemistry', 'Bioengineering', 'Bioinformatics', 'Biology', 'Bioscience', 'Biostatistics', 'BitSight', 'Blog', 'Bloomberg', 'Boards', 'Bose', 'Boston', 'BostonBMC', 'Botswana', 'Branson', 'Brave', 'Brazil', 'Breakfast', 'Brigham', 'Bring', 'Bringing', 'Broad', 'Broadway', 'Build', 'Building', 'Bulgaria', 'Bureau', 'Burning', 'Business', 'But', 'By', 'C', 'CCDS', 'CEO', 'CEPAC', 'CERTIFICATES', 'CFA', 'CFO', 'CFR', 'CHC', 'CHI', 'CIO', 'CJ', 'CMake', 'COMPETENCIES', 'CONDITIONS', 'CRM', 'CRO', 'CSCW', 'CT', 'CTO', 'CUDA', 'CV', 'CVB', 'CVS', 'Cafeteria', 'Calculus', 'Call', 'Cambridge', 'Campaign', 'Campus', 'Can', 'Cancer', 'Candidate', 'Capable', 'Capacity', 'Capital', 'Cardiovascular', 'Care', 'Carlo', 'Carry', 'Ccain', 'Census', 'Center', 'Central', 'Chain', 'Chairman', 'Chan', 'Chance', 'Characteristics', 'Chemistry', 'Chief', 'China', 'Christensen', 'Circadian', 'City', 'Claim', 'Claims', 'Classification', 'Clayton', 'Click', 'Client', 'Clinical', 'Clinicians', 'Clinicogenomics', 'Cloud', 'Cloudera', 'Clustering', 'Coding', 'Cohen', 'Collaborate', 'Collaborating', 'Collaboration', 'Collaborative', 'Colleagues', 'Collect', 'Collibra', 'Combine', 'Combining', 'Come', 'Comfort', 'Comfortable', 'Commercial', 'Commitment', 'Communicate', 'Communicating', 'Communication', 'Community', 'Company', 'Competencies', 'Completed', 'Completion', 'Complications', 'Compute', 'Computer', 'Computing', 'Conditions', 'Conduct', 'Conducting', 'Conducts', 'Confidently', 'Connect', 'Connecting', 'Considered', 'Consult', 'Consulting', 'Consumer', 'Contact', 'Container', 'Continuous', 'Contract', 'Contribute', 'Conversational', 'Coordinate', 'Coordinating', 'Core', 'Cortex', 'Cost', 'Counsel', 'Create', 'Creates', 'Creating', 'Creative', 'Critical', 'Cultivate', 'Curious', 'Currently', 'Customer', 'Cybersecurity', 'Côte', 'D', 'DARPA', 'DATA', 'DEPARTMENT', 'DESCRIPTION', 'DESIRABLE', 'DICOM', 'DNA', 'DNNs', 
'DO', 'DUTIES', 'Dash', 'Dassault', 'Data', 'Database', 'Databricks', 'DatabricksBayesian', 'Datawarehouse', 'Date', 'David', 'Day', 'Dean', 'Dec', 'December', 'Decision', 'Decisiveness', 'Dedication', 'Deep', 'Defense', 'Define', 'Defines', 'Degree', 'Degrees', 'Deliver', 'Delivering', 'Delphix', 'Demonstrable', 'Demonstrate', 'Demonstrated', 'Demonstrates', 'Dental', 'Department', 'Depending', 'Description', 'Descriptive', 'Design', 'Designer', 'Designs', 'Desirable', 'Desirables', 'Desired', 'Desk', 'Detail', 'Details', 'Detection', 'Determination', 'Determine', 'DevOps', 'Develop', 'Developer', 'Developing', 'Development', 'Develops', 'Devices', 'Diagnostics', 'Dialogflow', 'Diego', 'Digestive', 'Digital', 'Dimensionality', 'Direct', 'Director', 'Disabilities', 'Disadvantaged', 'Discovery', 'Diseases', 'Disorders', 'Distribution', 'Diversity', 'Division', 'Do', 'DoD', 'Docker', 'Documenting', 'Does', 'Draw', 'Drives', 'Drug', 'Due', 'Duration', 'During', 'Duties', 'Dyad', 'ED', 'EDUCATION', 'EEO', 'EMR', 'ENGIE', 'ENOVIA', 'EO', 'ETL', 'EXG', 'EXPERIENCE', 'East', 'Eclipse', 'EconCS', 'Econometrics', 'Economic', 'Economics', 'Economist', 'Ecova', 'Education', 'Educational', 'Effective', 'Efficiency', 'Efficient', 'ElasticSearch', 'Electric', 'Electrical', 'Elsevier', 'Employee', 'Employer', 'Employment', 'Empower', 'Encoders', 'Encouragement', 'Energy', 'Engage', 'Engine', 'Engineer', 'Engineering', 'Engineers', 'England', 'EnglandBoston', 'English', 'Enhance', 'Enjoy', 'Ensure', 'Ensuring', 'Environment', 'Epidemiology', 'Equal', 'Equally', 'Equipment', 'Essential', 'Essentials', 'Establish', 'Establishing', 'Estimate', 'Etiometry', 'Europe', 'Evaluate', 'Evaluating', 'Evaluation', 'Evangelize', 'Evelo', 'Examine', 'Examining', 'Examples', 'Excel', 'Excellent', 'Exception', 'Exceptional', 'Exchange', 'Excitingly', 'Executive', 'Exempt', 'Exercises', 'Exhibit', 'Exhibits', 'Expand', 'Experience', 'Experienced', 'Experiences', 'Experimental', 'Expert', 'Expertise', 'Exposure', 'Extensive', 'External', 'Extract', 'FAIR', 'FFNN', 'FLSA', 'Facebook', 'Facility', 'FactBase', 'Factor', 'Factors', 'Fair', 'Fairbank', 'Familiar', 'Familiarity', 'Fearlessly', 'Feb', 'Federal', 'Fidelity', 'Fields', 'Finance', 'Financial', 'First', 'Flag', 'Flagship', 'Flask', 'Flatiron', 'Flexibility', 'Fluency', 'Fluent', 'Follow', 'Football', 'For', 'Forces', 'Forest', 'Form', 'Formal', 'Formalize', 'Formulate', 'Forrester', 'Fortune', 'Forum', 'Foundation', 'FoundationCORE', 'Founded', 'Framingham', 'France', 'Francis', 'Francisco', 'Fraud', 'Free', 'Friday', 'From', 'Fruit', 'Fuel', 'Full', 'Functions', 'Fusion', 'GAA', 'GI', 'GL', 'GPA', 'GPGPUYou', 'GPU', 'Gastrointesinal', 'Gastrointestinal', 'Gaussian', 'Gen', 'General', 'Generalized', 'Generating', 'Genetics', 'Git', 'GitHub', 'Glass', 'Global', 'Go', 'Goal', 'Good', 'Google', 'Governance', 'Government', 'Grad', 'Grads', 'Graduate', 'Grant', 'Great', 'Group', 'GroupsTranslate', 'Growth', 'Guard', 'Guided', 'Guidelines', 'Gym', 'HDFS', 'HIGHLY', 'HIPAA', 'HIV', 'HMS', 'HPC', 'HPSL', 'HPV', 'HQ', 'HTML', 'Hadoop', 'Hampshire', 'HampshireBoston', 'Hands', 'Harvard', 'Have', 'Haves', 'Hawaiians', 'He', 'Health', 'HealthCare', 'HealthNet', 'Healthcare', 'Heart', 'Help', 'Helpdesk', 'Hemostasis', 'Here', 'HiTS', 'Hierarchical', 'High', 'Higher', 'Highest', 'Highly', 'Hiring', 'Hispanic', 'Hive', 'Hockey', 'Homeland', 'Homesite', 'Horizons', 'Hospital', 'Hospitals', 'Hours', 'How', 'However', 'Hub', 'Huge', 'Human', 'Humana', 'Humor', 'Hypothesis', 'I', 
'IBM', 'IL', 'IM', 'IMED', 'IMU', 'IMWUT', 'IP', 'IRB', 'IS', 'IT', 'IUI', 'IVD', 'IVZ', 'Idea', 'Ideal', 'Ideally', 'Identifies', 'Identify', 'Identifying', 'Identity', 'If', 'Illumina', 'Image', 'Imagine', 'Impact', 'Impala', 'Implement', 'Implementation', 'Improvement', 'In', 'Includes', 'Inclusion', 'Incomplete', 'Index', 'India', 'Indians', 'Indicator', 'Indigo', 'Individual', 'Individuals', 'Industry', 'InfiniBand', 'Info', 'Informatica', 'Informatics', 'Information', 'Infusion', 'Ingenuity', 'Initial', 'Initiative', 'Innovation', 'Insight', 'Insights', 'Institute', 'Institutes', 'Institutional', 'Instructor', 'Instrumentation', 'Insurance', 'Insurtech', 'Integrate', 'Integrating', 'Integration', 'Integrity', 'Intellectual', 'Intelligence', 'Intense', 'Interact', 'Interacting', 'Interacts', 'Interest', 'Interested', 'Interface', 'Intern', 'International', 'Internet', 'Interns', 'Internship', 'InternshipBoston', 'Interprets', 'Intuitive', 'Invesco', 'Investigators', 'Investment', 'Investments', 'Investor', 'Involve', 'Is', 'Islanders', 'It', 'Ivoire', 'JOB', 'Jameel', 'Jan', 'Java', 'JavaScript', 'Javascript', 'Jenkins', 'Job', 'Join', 'Joint', 'Journal', 'Julia', 'July', 'Junction', 'June', 'Junior', 'Jupyter', 'KEY', 'KNN', 'KPIs', 'KTC', 'Kanban', 'Keep', 'Kenexa', 'Keras', 'Key', 'Kintai', 'Kirschstein', 'Klaviyo', 'Know', 'Knowledge', 'Knowledgeable', 'Kubernetes', 'L', 'LAW', 'LICENSES', 'LMS', 'LOCATION', 'LSP', 'LSTM', 'LTD', 'Lab', 'Laboratory', 'Labs', 'Lake', 'Language', 'Languages', 'Lastly', 'Latex', 'Latif', 'Launched', 'Law', 'Lead', 'Leading', 'Learn', 'Learning', 'Lester', 'Let', 'Letters', 'Level', 'Leverage', 'Leveraging', 'Liberty', 'Life', 'Lifetime', 'Limited', 'Linear', 'Lines', 'LinkedIn', 'Linux', 'Loans', 'Localization', 'Located', 'Location', 'Locations', 'LogMeIn', 'Logistics', 'London', 'Longwood', 'Look', 'Looking', 'Lucene', 'Lunch', 'M', 'MA', 'MATLAB', 'MCR', 'MD', 'MDM', 'MGH', 'MIT', 'MITRE', 'ML', 'MLlib', 'MPEC', 'MPI', 'MPP', 'MRI', 'MS', 'MSc', 'MXNet', 'MYSQLLanguages', 'MacB', 'Machine', 'Machines', 'Mackey', 'Madrid', 'Main', 'Maintaining', 'Make', 'Makes', 'Manage', 'Management', 'Manager', 'Managing', 'Manipulate', 'Manipulating', 'Manipulation', 'Manufacturing', 'Many', 'Mapping', 'Mar', 'Market', 'Marketbasket', 'Marketers', 'Marketing', 'Markov', 'Massachusetts', 'Master', 'Masters', 'MatLab', 'Matab', 'Materials', 'Math', 'MathWorks', 'Mathematics', 'Matlab', 'Matploltib', 'May', 'McKinsey', 'Measure', 'Measurement', 'Medicaid', 'Medical', 'Medicine', 'Members', 'Mentor', 'Mercurial', 'Metadata', 'Metrics', 'Microsoft', 'Middle', 'Military', 'Min', 'Mine', 'Minimum', 'Minneapolis', 'Minorities', 'Mo', 'Model', 'Modeling', 'Modelling', 'Moderna', 'Molecular', 'MongoDB', 'Monitor', 'Monte', 'More', 'Moreover', 'Morning', 'Most', 'Mozambique', 'Much', 'MuleSoft', 'Multiple', 'Must', 'Mutual', 'MySQL', 'N', 'NCI', 'NERD', 'NGS', 'NIH', 'NLP', 'NPL', 'NRSA', 'NY', 'NYC', 'Nanodrop', 'Nation', 'National', 'Native', 'Natives', 'Natural', 'Nature', 'Need', 'Network', 'Networks', 'Neural', 'New', 'Next', 'Nice', 'Nift', 'No', 'NoSQL', 'None', 'Normal', 'North', 'Northeastern', 'Notebooks', 'Now', 'NumPy', 'Numpy', 'OBI', 'OFCCP', 'OOP', 'OUR', 'Object', 'Objective', 'Occasional', 'Of', 'Offers', 'Office', 'Officer', 'On', 'Oncology', 'One', 'Ongoing', 'Only', 'OpenFlow', 'Openly', 'Operational', 'Operations', 'Opportunities', 'Opportunity', 'Optic', 'Optimisation', 'Optimizing', 'Options', 'Oracle', 'Ordinance', 'OrgSolutions', 'Organization', 
'Oriented', 'Original', 'Orthopaedic', 'Orthopedics', 'Other', 'Our', 'Outcomes', 'Outlook', 'Outstanding', 'Over', 'Overview', 'Owners', 'P', 'PCA', 'PHS', 'PI', 'PIVOT', 'PM', 'POSITION', 'PRINCIPAL', 'Pacific', 'Paid', 'Pandas', 'Paris', 'Park', 'Parkes', 'Parkinson', 'Participate', 'Partner', 'Partners', 'Passion', 'Passionate', 'Patent', 'Patients', 'Perform', 'Performance', 'Performing', 'Performs', 'Perks', 'Perl', 'Personal', 'Personalization', 'Persons', 'PhD', 'PharmD', 'Pharmaceutical', 'Pharmacology', 'Philadelphia', 'Physical', 'Physics', 'Pioneering', 'Plan', 'Planning', 'Platforms', 'Please', 'Plotly', 'Plusses', 'Policy', 'Population', 'Position', 'Positive', 'Possess', 'PostDocs', 'Poster', 'Posters', 'Postg', 'PostgreSQL', 'Postgres', 'Postgresql', 'Posting', 'Poverty', 'Power', 'PowerAdvocate', 'PowerPoint', 'Powered', 'Practical', 'Practice', 'Predictive', 'Preferable', 'Preference', 'Preferred', 'Prepare', 'Prepares', 'Present', 'Presenting', 'Presents', 'Presto', 'PrestoDB', 'Preventing', 'Previous', 'Price', 'Primary', 'Prime', 'Principal', 'Prior', 'Privacy', 'Proactively', 'Probability', 'Process', 'Processing', 'Procurement', 'Produce', 'Product', 'Productivity', 'Productize', 'Prof', 'Professional', 'Professions', 'Proficiency', 'Proficient', 'Program', 'Programming', 'Project', 'Projects', 'Propensity', 'Prototype', 'Proven', 'Provide', 'Providence', 'Psychology', 'Public', 'Publish', 'Pubmed', 'Pulse', 'Purchasing', 'Purpose', 'Pursuant', 'Putting', 'PyTorch', 'Python', 'PythonKnowledge', 'QA', 'QC', 'QUALIFICATIONS', 'Quailfications', 'Qualification', 'Qualifications', 'Quality', 'Quantitative', 'Qubit', 'Query', 'Quest', 'R', 'RDMA', 'REGISTRATIONS', 'REQUIRED', 'REQUIREMENTS', 'RESEARCH', 'RESPECT', 'RESPONSIBILITIES', 'RESPONSIBILITY', 'RESTful', 'RF', 'RNA', 'RYou', 'Radiology', 'Random', 'Rapidly', 'Read', 'Reasonable', 'RecSys', 'Recent', 'Recognition', 'Recognized', 'Reconstruction', 'Recruiter', 'Recruiting', 'RedShift', 'Redshift', 'Regression', 'Regular', 'Relationship', 'Relevant', 'ReltioMust', 'Rentals', 'Reporting', 'Req', 'Required', 'Requirements', 'Research', 'Researching', 'Resources', 'Respect', 'Respond', 'Responsibilities', 'Responsibility', 'Responsible', 'Results', 'Review', 'Richard', 'Rifiniti', 'Right', 'Risk', 'Roche', 'Role', 'Root', 'Ruby', 'Run', 'Running', 'Ruth', 'SAI', 'SAP', 'SAS', 'SCIENTIST', 'SENIOR', 'SIGIR', 'SKILLS', 'SLAM', 'SPARK', 'SPSS', 'SQL', 'SSIS', 'STATA', 'STD', 'STEM', 'SUMMARY', 'SUPERVISORY', 'SUPPLEMENT', 'SVD', 'SVM', 'SVMs', 'SaaS', 'Sample', 'Samples', 'San', 'Saturdays', 'Scala', 'Schedule', 'Scheduled', 'Scheduling', 'Schneider', 'Scholar', 'Scholarship', 'School', 'SciKit', 'SciPy', 'Science', 'Sciences', 'Scientific', 'Scientist', 'Scientists', 'Scikit', 'Scrum', 'Seamlessly', 'Seattle', 'Sec', 'Secondary', 'Secretary', 'Security', 'See', 'Seek', 'Segment', 'Select', 'Selected', 'Self', 'Senior', 'Sense', 'Sensor', 'September', 'Seres', 'Serve', 'Server', 'Service', 'Services', 'Several', 'Shaker', 'Shape', 'Share', 'She', 'Shift', 'Shiny', 'ShinyExpansive', 'Significant', 'Since', 'Singapore', 'Sir', 'Skills', 'Slack', 'Sleep', 'So', 'Soccer', 'Social', 'Sofia', 'Software', 'Solid', 'Solution', 'Solutions', 'Solve', 'Some', 'South', 'Spark', 'SparkProficiency', 'Special', 'Spend', 'Sports', 'Spotify', 'Springs', 'St', 'Stakeholder', 'Standard', 'Start', 'Stata', 'State', 'Statement', 'States', 'Statistical', 'Statistics', 'StatisticsExperience', 'Status', 'Stay', 'Steps', 'Steward', 'Stewards', 
'Still', 'Stock', 'Strategic', 'Street', 'Strong', 'Structured', 'Student', 'Students', 'Studio', 'Study', 'Submit', 'Subsidized', 'Subversion', 'Success', 'Successful', 'Successfully', 'Such', 'Summary', 'Summer', 'Superior', 'Supervisory', 'Supplements', 'Supply', 'Support', 'Surgery', 'Sustainability', 'Sustained', 'System', 'Systemes', 'Systems', 'Systèmes', 'TA', 'TECHNICAL', 'THE', 'TITLE', 'Tableau', 'Take', 'Talend', 'Teach', 'Teaching', 'Team', 'Teammate', 'Teamwork', 'Tech', 'Technical', 'Technologies', 'Technology', 'Telecom', 'Temporary', 'TensorFlow', 'TensorFlowDatabases', 'Tensorflow', 'Test', 'Testing', 'Text', 'Thailand', 'That', 'The', 'Therapeutic', 'Therapeutics', 'Therapy', 'There', 'These', 'They', 'This', 'Those', 'Thoughtful', 'Three', 'Thrive', 'Through', 'Throughout', 'Thursday', 'Time', 'Times', 'Title', 'To', 'Tobacco', 'Today', 'Together', 'Tokyo', 'Tools', 'Torch', 'Train', 'Training', 'Traits', 'Transfer', 'Translate', 'Transnational', 'Transportation', 'Travel', 'Treatments', 'Tree', 'Trello', 'TripAdvisor', 'Troubleshoot', 'Tryout', 'Tuesday', 'Tufts', 'Twitter', 'Two', 'Type', 'Typically', 'UIST', 'UK', 'US', 'USA', 'UX', 'Ukraine', 'Uncompromising', 'Under', 'Understand', 'Understanding', 'Undertaking', 'Underwriting', 'Unit', 'United', 'University', 'Unposting', 'Up', 'Us', 'Use', 'User', 'Users', 'Using', 'Utilize', 'Utilizing', 'VALUES', 'VBA', 'VM', 'VMs', 'VMware', 'VR', 'Value', 'Values', 'Vector', 'Ventures', 'Verily', 'Very', 'Veteran', 'Veterans', 'Videos', 'Virgin', 'Virtual', 'Vistaprint', 'W', 'WA', 'WORKING', 'WWW', 'Wall', 'Waltham', 'Want', 'Watson', 'We', 'Web', 'Webinars', 'Wednesday', 'Weekly', 'Wellbeing', 'Werfen', 'What', 'When', 'Where', 'Whether', 'While', 'Who', 'Why', 'Will', 'Willingness', 'Wired', 'With', 'Within', 'Without', 'Women', 'Word', 'Work', 'Working', 'Works', 'World', 'Worldpay', 'Would', 'Write', 'Writing', 'Wyman', 'XGBoost', 'XML', 'Xgboost', 'Yale', 'York', 'You', 'Your', 'Youssef', 'ZR', 'ZRNift', 'Zealand', 'Zeppelin', 'Zimbabwe', 'Zurich', 'ability', 'able', 'abreast', 'abroad', 'absolute', 'abstract', 'academia', 'academic', 'academically', 'accelerate', 'accelerating', 'accept', 'accepted', 'accepting', 'access', 'accessible', 'accessing', 'accolade', 'accommodate', 'accommodation', 'accompanying', 'accomplish', 'account', 'accountability', 'accountable', 'accredited', 'accuracy', 'accurate', 'accurately', 'achievable', 'achieve', 'achievement', 'achieving', 'acid', 'acquired', 'acquisition', 'across', 'act', 'action', 'actionable', 'active', 'actively', 'activity', 'actual', 'actually', 'actuarial', 'acuity', 'ad', 'adapt', 'added', 'addition', 'additional', 'address', 'addressing', 'adept', 'adhere', 'adjust', 'adjusted', 'administrative', 'admission', 'admitted', 'adoption', 'advance', 'advanced', 'advancement', 'advancing', 'advantage', 'advertised', 'advertisement', 'advice', 'advisor', 'advisory', 'advocacy', 'advocate', 'affect', 'affiliate', 'affiliated', 'affiliation', 'affirmative', 'affordable', 'aforementioned', 'afraid', 'age', 'agency', 'agenda', 'agent', 'aggregate', 'aggressive', 'agile', 'ago', 'agreement', 'ahead', 'aid', 'aim', 'aimed', 'al', 'alert', 'algebra', 'algorithm', 'algorithmic', 'align', 'aligned', 'aligning', 'alignment', 'aligns', 'alliance', 'allocate', 'allocation', 'allow', 'allows', 'along', 'alongside', 'also', 'alternative', 'always', 'amazing', 'ambiguity', 'ambiguous', 'ambulatory', 'amenable', 'among', 'amount', 'analyse', 'analysesDevelops', 'analysesImplement', 
'analysis', 'analysisAnalyze', 'analysisComfort', 'analysisEnhancing', 'analysisProvide', 'analyst', 'analytic', 'analytical', 'analytics', 'analyze', 'analyzed', 'analyzes', 'analyzing', 'anatomical', 'ancestry', 'annotation', 'annual', 'annually', 'anomaly', 'another', 'answer', 'answered', 'answering', 'anticipate', 'anticipated', 'anyone', 'anything', 'anywhere', 'applicability', 'applicable', 'applicant', 'application', 'applied', 'applies', 'apply', 'applying', 'appointment', 'appreciates', 'appreciation', 'approach', 'approachesHas', 'appropriate', 'appropriately', 'approval', 'approximately', 'architect', 'architecting', 'architectural', 'architecture', 'archive', 'area', 'arise', 'around', 'arrangement', 'array', 'arrest', 'art', 'articulating', 'artifact', 'artificial', 'artist', 'asap', 'asked', 'asking', 'aspect', 'aspenONE', 'aspiration', 'aspire', 'ass', 'assay', 'assembled', 'assessment', 'asset', 'assigned', 'assignment', 'assist', 'assistance', 'assistant', 'assisting', 'associate', 'associated', 'assume', 'assumption', 'assurance', 'assure', 'attached', 'attend', 'attendance', 'attending', 'attention', 'attitude', 'attract', 'attracts', 'attribute', 'attributed', 'attribution', 'attrition', 'audience', 'audiencesDemonstrates', 'audio', 'audit', 'auditing', 'augment', 'augmented', 'authentication', 'author', 'authorization', 'authorship', 'automate', 'automated', 'automating', 'automation', 'autonomously', 'autonomy', 'available', 'avenue', 'average', 'award', 'awarded', 'awareness', 'b', 'bachelor', 'back', 'backed', 'background', 'bagging', 'balance', 'banking', 'base', 'based', 'baseline', 'basic', 'basis', 'bayes', 'bear', 'beat', 'beating', 'become', 'becoming', 'began', 'begin', 'beginning', 'begun', 'behalf', 'behave', 'behavior', 'behavioral', 'behaviour', 'behavioural', 'behind', 'belief', 'believe', 'bench', 'benchmark', 'benefit', 'bespoke', 'best', 'better', 'beyond', 'bias', 'big', 'bilingual', 'billing', 'billion', 'bioinformaticians', 'bioinformatics', 'biological', 'biologist', 'biology', 'biologyComfort', 'biomedical', 'biometric', 'biopharmaceutical', 'biophysical', 'bioscience', 'biostatistical', 'biostatisticians', 'biostatistics', 'bleeding', 'blend', 'blocker', 'blood', 'board', 'body', 'bold', 'bonus', 'boosting', 'borough', 'bottom', 'bound', 'boundary', 'boutique', 'box', 'brain', 'brainstorm', 'brand', 'breadth', 'break', 'breaking', 'breakthrough', 'brief', 'brightest', 'brilliant', 'bring', 'bringing', 'brings', 'broad', 'broader', 'broadly', 'broker', 'budgeting', 'bug', 'build', 'building', 'built', 'burden', 'burgeoning', 'business', 'busy', 'buyer', 'c', 'caching', 'calculation', 'calibration', 'call', 'campaign', 'campus', 'cancer', 'candidate', 'cannabis', 'capability', 'capable', 'capacity', 'capitalization', 'capture', 'carbon', 'cardiovascular', 'care', 'career', 'careful', 'caring', 'carried', 'carrier', 'carry', 'carve', 'case', 'casualty', 'catalog', 'categorization', 'category', 'causal', 'causality', 'cause', 'cell', 'center', 'central', 'centric', 'ceremony', 'certain', 'certification', 'certified', 'chain', 'challenge', 'challenged', 'challenging', 'chance', 'change', 'changing', 'channel', 'characteristic', 'characterization', 'characterize', 'characterizing', 'charge', 'charged', 'chart', 'chatbots', 'check', 'chemist', 'chemistry', 'choice', 'choose', 'chosen', 'churn', 'circumstance', 'cited', 'citizen', 'citizenship', 'city', 'civil', 'civilian', 'claim', 'clarity', 'class', 'classical', 'classification', 'classified', 
'classifier', 'classify', 'classifying', 'classroom', 'clean', 'cleaning', 'cleanliness', 'cleanse', 'cleansing', 'clear', 'clearable', 'clearance', 'clearly', 'click', 'clickstream', 'client', 'climate', 'clinic', 'clinical', 'clinician', 'clinicogenomic', 'close', 'closely', 'cloud', 'club', 'cluster', 'clustering', 'coach', 'coaching', 'coagulation', 'code', 'coding', 'cognitive', 'coherent', 'cohort', 'collaborate', 'collaborating', 'collaboration', 'collaborative', 'collaboratively', 'collaborator', 'colleague', 'collect', 'collected', 'collecting', 'collection', 'college', 'color', 'combating', 'combination', 'combine', 'combined', 'combining', 'come', 'comfort', 'comfortable', 'coming', 'command', 'commensurate', 'commerce', 'commercial', 'commit', 'commitment', 'committed', 'common', 'communicate', 'communicates', 'communicating', 'communication', 'communicative', 'communicator', 'community', 'company', 'compare', 'comparison', 'compartmental', 'compatibly', 'compelling', 'compensates', 'compensation', 'competence', 'competency', 'competing', 'competition', 'competitive', 'competitor', 'complaint', 'complete', 'completed', 'completely', 'completing', 'completion', 'complex', 'complexity', 'compliance', 'comply', 'component', 'compound', 'comprehensive', 'comprehensiveness', 'comprised', 'comprises', 'computation', 'computational', 'computationally', 'computer', 'computing', 'conceive', 'concentration', 'concept', 'conceptualize', 'concise', 'concluded', 'conclusion', 'concreate', 'concurrent', 'condition', 'conditional', 'conduct', 'conducted', 'conducting', 'conference', 'conferencing', 'confidence', 'confident', 'confidentiality', 'confidently', 'configuration', 'congestive', 'connect', 'connected', 'consider', 'considerable', 'consideration', 'considered', 'considering', 'consistent', 'consisting', 'consists', 'consolidate', 'consolidation', 'constant', 'constantly', 'constraint', 'construct', 'construction', 'constructive', 'construed', 'consult', 'consultant', 'consultation', 'consultative', 'consulting', 'consumable', 'consumer', 'consumption', 'contact', 'contain', 'contained', 'container', 'contains', 'contender', 'content', 'context', 'continual', 'continually', 'continue', 'continued', 'continuing', 'continuous', 'continuously', 'continuum', 'contract', 'contractor', 'contribute', 'contributes', 'contributing', 'contribution', 'contributor', 'control', 'convenience', 'convention', 'conventional', 'conversation', 'convert', 'convey', 'conviction', 'convince', 'convincingly', 'cooperatively', 'coordinate', 'coordination', 'coordinator', 'core', 'coronary', 'corporate', 'correct', 'corrective', 'correlate', 'corresponding', 'cost', 'could', 'country', 'coupled', 'courage', 'course', 'coursework', 'cover', 'coverage', 'crave', 'crazy', 'create', 'created', 'creates', 'creating', 'creation', 'creative', 'credential', 'credibility', 'creditable', 'creed', 'crime', 'criminal', 'critical', 'criticism', 'cross', 'crucial', 'cryptography', 'cubicle', 'culmination', 'cultural', 'culturally', 'culture', 'cumulative', 'curation', 'curiosity', 'curious', 'currency', 'current', 'currently', 'curriculum', 'custom', 'customer', 'customized', 'cut', 'cutting', 'cvshealthsupport', 'cyber', 'cybersecurity', 'cycle', 'cyclostationary', 'daily', 'dashboard', 'data', 'dataOrganized', 'database', 'datapoints', 'dataset', 'datasets', 'date', 'day', 'deadline', 'deaf', 'deal', 'dealing', 'death', 'debugging', 'decision', 'decomposition', 'decreasing', 'dedicated', 'dedication', 'deemed', 'deep', 
'deepen', 'deeper', 'deeply', 'define', 'defined', 'defining', 'degree', 'deidentification', 'delay', 'deliver', 'deliverable', 'delivered', 'delivering', 'delivers', 'delivery', 'demand', 'demo', 'democratization', 'democratize', 'demographic', 'demography', 'demonstrable', 'demonstrably', 'demonstrate', 'demonstrated', 'demonstrates', 'dental', 'department', 'depending', 'depict', 'deploy', 'deploying', 'deployment', 'deploys', 'depth', 'derive', 'derived', 'deriving', 'describe', 'described', 'describing', 'description', 'descriptive', 'deserves', 'design', 'designation', 'designed', 'designer', 'designing', 'desirable', 'desire', 'desired', 'desktop', 'detail', 'detailed', 'detailing', 'detect', 'detection', 'determination', 'determine', 'determined', 'determining', 'deterministic', 'devastating', 'develop', 'developed', 'developedInterface', 'developer', 'developing', 'development', 'developmentExperience', 'develops', 'device', 'devise', 'devising', 'devoted', 'diabetes', 'diagnosing', 'diagnosis', 'diagnostic', 'diagnostics', 'diagram', 'dialect', 'difference', 'different', 'differentiate', 'differentiated', 'differentiating', 'difficult', 'dig', 'digestive', 'digging', 'digital', 'diligence', 'dimensionality', 'direct', 'direction', 'directly', 'dirty', 'disability', 'disadvantaged', 'discharge', 'discipline', 'disciplined', 'disclose', 'disclosed', 'disclosure', 'discover', 'discovering', 'discovers', 'discovery', 'discrepancy', 'discriminate', 'discrimination', 'discus', 'discussed', 'discussion', 'disease', 'disorder', 'disparate', 'disposition', 'disprove', 'disrupted', 'disrupter', 'disruptive', 'disseminate', 'distill', 'distinctive', 'distributed', 'distribution', 'distributor', 'diverse', 'diversifying', 'diversity', 'divestments', 'division', 'doctor', 'doctoral', 'document', 'documentation', 'documented', 'dollar', 'domain', 'domestically', 'dominant', 'done', 'door', 'double', 'downtown', 'dramatic', 'draw', 'dream', 'drive', 'driven', 'driver', 'driving', 'drop', 'drug', 'drugging', 'due', 'duration', 'dust', 'duty', 'dynamic', 'eValuation', 'eager', 'earliest', 'early', 'earned', 'earning', 'ease', 'easy', 'ecommerce', 'econometric', 'econometrics', 'economic', 'economically', 'economics', 'economy', 'ecosystem', 'edX', 'edge', 'editing', 'educating', 'education', 'educational', 'educationally', 'effect', 'effective', 'effectively', 'effectiveness', 'efficiency', 'efficient', 'efficiently', 'effort', 'eigenvalue', 'either', 'electronic', 'elevate', 'eligible', 'elite', 'else', 'email', 'embrace', 'embracing', 'emerge', 'emerged', 'emerging', 'eminence', 'empathetically', 'emphasis', 'emphasized', 'employ', 'employee', 'employer', 'employment', 'empower', 'empowers', 'enable', 'enabled', 'enablement', 'enabler', 'enables', 'enabling', 'encapsulate', 'encourage', 'encouraged', 'encourages', 'encouraging', 'end', 'endpoint', 'energetic', 'energized', 'energy', 'enforcement', 'engage', 'engaged', 'engagement', 'engaging', 'engender', 'engine', 'engineer', 'engineering', 'enhance', 'enhancement', 'enhancing', 'enjoy', 'enjoys', 'enough', 'enrich', 'enrichment', 'enrolled', 'ensemble', 'ensure', 'ensures', 'ensuring', 'entail', 'entailing', 'enterprise', 'enthusiasm', 'enthusiast', 'enthusiastic', 'entire', 'entity', 'entrepreneurial', 'entry', 'environment', 'environmentCommunicates', 'environmentDemonstrates', 'environmentExcellent', 'environmentExperience', 'environmental', 'epidemiological', 'epidemiologist', 'epidemiology', 'equal', 'equally', 'equation', 'equilavent', 
'equip', 'equipment', 'equivalent', 'error', 'escalate', 'especially', 'essential', 'establish', 'established', 'establishing', 'establishment', 'estimate', 'estimating', 'et', 'etc', 'ethic', 'ethical', 'ethnic', 'ethnicity', 'euro', 'evaluate', 'evaluating', 'evaluation', 'even', 'evening', 'event', 'ever', 'every', 'everyday', 'everyone', 'everything', 'everywhere', 'evidence', 'evolution', 'evolve', 'evolving', 'exam', 'examining', 'example', 'exceeding', 'excel', 'excellence', 'excellent', 'exceptional', 'excessive', 'exchange', 'exchanging', 'excited', 'exciting', 'excluding', 'execute', 'executed', 'executing', 'execution', 'executive', 'exercise', 'exhaustive', 'exhibit', 'exhilarating', 'exist', 'existing', 'expand', 'expanding', 'expands', 'expansion', 'expectation', 'expected', 'expedition', 'expense', 'experience', 'experienceExperience', 'experienced', 'experiential', 'experiment', 'experimental', 'experimentation', 'expert', 'expertise', 'explain', 'explaining', 'exploration', 'exploratory', 'explore', 'exploring', 'exponential', 'exposure', 'exposureExperience', 'expression', 'extend', 'extended', 'extending', 'extensive', 'external', 'externally', 'extract', 'extracting', 'extraction', 'extraordinary', 'extreme', 'eye', 'fabrication', 'face', 'facet', 'facilitate', 'facilitated', 'facility', 'facing', 'facingExperience', 'fact', 'factor', 'faculty', 'failure', 'fair', 'famed', 'familiar', 'familiarity', 'family', 'fan', 'fashion', 'fast', 'faster', 'faulty', 'feature', 'federal', 'feed', 'feedback', 'feel', 'fellow', 'fertile', 'fertility', 'fidelity', 'field', 'fieldDemonstrated', 'fieldProficient', 'fiercely', 'fifty', 'fight', 'file', 'filing', 'final', 'finalizing', 'finally', 'finance', 'financial', 'find', 'finding', 'finish', 'fintech', 'firm', 'first', 'fit', 'fitting', 'five', 'fix', 'flexibility', 'flexible', 'flow', 'focus', 'focused', 'focusing', 'follow', 'following', 'foot', 'footprint', 'force', 'forecasting', 'foremost', 'forest', 'forge', 'form', 'formal', 'format', 'formatting', 'formed', 'former', 'formerly', 'formulate', 'formulation', 'forth', 'forward', 'foster', 'found', 'foundation', 'founded', 'founder', 'founding', 'four', 'fourth', 'frame', 'framework', 'fraud', 'free', 'freedom', 'frequently', 'fresh', 'front', 'fuel', 'fulfill', 'full', 'fuller', 'fullest', 'fulltime', 'fully', 'fun', 'function', 'functional', 'functionality', 'fund', 'fundamental', 'fundamentally', 'funded', 'funders', 'funnel', 'furnish', 'furtherance', 'furthering', 'future', 'fuzzy', 'gain', 'gained', 'gaining', 'game', 'gaming', 'gas', 'gastroenterology', 'gather', 'gathering', 'gender', 'general', 'generalist', 'generalized', 'generally', 'generate', 'generated', 'generates', 'generating', 'generation', 'generic', 'generous', 'genetic', 'geneticist', 'genetics', 'genomic', 'genomics', 'genuine', 'get', 'getting', 'ggplot', 'gift', 'git', 'give', 'given', 'giving', 'glean', 'global', 'globally', 'globe', 'go', 'goal', 'goalsInterest', 'good', 'got', 'govern', 'governance', 'governing', 'government', 'grading', 'graduate', 'graduated', 'graduation', 'grant', 'grantee', 'graph', 'graphic', 'graphical', 'gratifying', 'great', 'greater', 'greatest', 'greatly', 'greatness', 'grid', 'ground', 'group', 'grow', 'growing', 'growth', 'guide', 'guideline', 'guiding', 'gym', 'habit', 'half', 'hand', 'handle', 'handled', 'handling', 'happier', 'happiness', 'harassment', 'hard', 'harmonization', 'harness', 'harnessing', 'hat', 'head', 'headquartered', 'headquarters', 'health', 
'healthcare', 'healthier', 'healthiest', 'healthy', 'hearing', 'heart', 'heat', 'heavy', 'hectic', 'held', 'help', 'helpful', 'helping', 'hepatitis', 'hidden', 'high', 'higher', 'highest', 'highly', 'hire', 'hired', 'hiring', 'history', 'hoc', 'hold', 'holder', 'holding', 'home', 'horizon', 'horizontal', 'hospital', 'host', 'hosted', 'hosting', 'hottest', 'hour', 'house', 'http', 'human', 'humanity', 'humility', 'hundred', 'hybrid', 'hyperparameter', 'hypothesis', 'idea', 'ideal', 'ideally', 'ideation', 'identifiable', 'identification', 'identified', 'identify', 'identifying', 'identity', 'illustrate', 'image', 'imagery', 'imagination', 'imagined', 'imaging', 'immediate', 'immediately', 'immigration', 'immune', 'impact', 'impactful', 'impacting', 'impairment', 'implement', 'implementation', 'implemented', 'implementing', 'implication', 'importance', 'important', 'importantly', 'importing', 'impossible', 'improve', 'improved', 'improvement', 'improving', 'imputation', 'inappropriate', 'incentive', 'include', 'included', 'includes', 'including', 'inclusion', 'inclusive', 'inclusivity', 'income', 'incompatible', 'incomplete', 'inconsistency', 'incorporate', 'increase', 'increasing', 'incrementally', 'incumbent', 'indeed', 'independence', 'independent', 'independently', 'indicate', 'indicator', 'individual', 'individualized', 'indoor', 'industry', 'inexplicable', 'infectious', 'inferential', 'inflammatory', 'influence', 'influencing', 'inform', 'informatica', 'informaticians', 'information', 'informed', 'informing', 'infrastructure', 'infused', 'ingenuity', 'ingest', 'ingestion', 'inhibited', 'initial', 'initiated', 'initiative', 'innovate', 'innovating', 'innovation', 'innovative', 'inordinate', 'input', 'inquired', 'inquiry', 'insanely', 'inside', 'insight', 'insightful', 'inspire', 'inspired', 'inspires', 'installation', 'instance', 'instinctive', 'institution', 'institutional', 'instruction', 'instructor', 'instrumental', 'insurance', 'insuranceDental', 'insurer', 'integral', 'integrate', 'integrated', 'integrating', 'integration', 'integrative', 'integrity', 'intellectual', 'intelligence', 'intelligent', 'intended', 'intensive', 'interact', 'interacting', 'interaction', 'interactive', 'interdisciplinary', 'interest', 'interested', 'interesting', 'interim', 'intermediate', 'intermittent', 'intern', 'internal', 'internally', 'international', 'internationally', 'internet', 'internship', 'interpersonal', 'interpret', 'interpretation', 'interpreted', 'interpreting', 'interval', 'intervention', 'interventional', 'interview', 'intrigued', 'introduce', 'introducing', 'introductory', 'intuitive', 'invent', 'inventing', 'inventive', 'inventory', 'invest', 'investigate', 'investigates', 'investigating', 'investigation', 'investigator', 'investing', 'investment', 'invite', 'involve', 'involved', 'involvement', 'involves', 'involving', 'issue', 'issuesPresent', 'item', 'iterative', 'jeopardize', 'job', 'join', 'joining', 'journal', 'journey', 'judge', 'judgment', 'junior', 'justification', 'k', 'keen', 'key', 'kick', 'kind', 'king', 'know', 'knowledge', 'knowledgeable', 'known', 'lab', 'label', 'labor', 'laboratory', 'lake', 'lambda', 'landmark', 'landscape', 'language', 'large', 'largely', 'largest', 'last', 'lasting', 'latent', 'later', 'latest', 'latitude', 'latter', 'launch', 'launched', 'law', 'lawfully', 'le', 'lead', 'leader', 'leadership', 'leading', 'leaf', 'learn', 'learner', 'learning', 'leased', 'least', 'leave', 'led', 'left', 'legacy', 'legal', 'legally', 'lens', 'lesion', 'let', 
'letter', 'level', 'leverage', 'leveraging', 'liability', 'liaising', 'liaison', 'library', 'lie', 'lieu', 'life', 'lifecycle', 'lifestyle', 'lift', 'like', 'likelihood', 'likely', 'limit', 'limitation', 'limited', 'line', 'lineage', 'linear', 'link', 'linking', 'list', 'listed', 'listen', 'listener', 'listing', 'literature', 'little', 'live', 'loan', 'local', 'localization', 'located', 'location', 'locker', 'logical', 'logically', 'logistic', 'long', 'longitudinal', 'look', 'looking', 'loosely', 'love', 'low', 'lower', 'loyalty', 'machine', 'macro', 'mad', 'made', 'main', 'maintain', 'maintaining', 'maintains', 'maintenance', 'major', 'make', 'maker', 'making', 'manage', 'managed', 'management', 'manager', 'managing', 'manipulate', 'manipulating', 'manipulation', 'manipulationExperience', 'manner', 'mannerCreating', 'manufacturing', 'manuscript', 'many', 'mapping', 'margin', 'marital', 'market', 'marketing', 'marketplace', 'marshal', 'mart', 'massive', 'master', 'match', 'matching', 'material', 'maternity', 'math', 'mathematical', 'mathematician', 'mathematics', 'matplotlib', 'matrixed', 'matter', 'mature', 'maturity', 'maximize', 'may', 'mdm', 'mean', 'meaning', 'meaningful', 'measurable', 'measure', 'measurement', 'mechanism', 'mechanistic', 'medical', 'medicine', 'medium', 'meet', 'meeting', 'member', 'mental', 'mentality', 'mentor', 'mentored', 'mentoring', 'merge', 'merger', 'merit', 'message', 'metabolic', 'metabolite', 'metabolomics', 'metadata', 'method', 'methodological', 'methodology', 'methodsDoing', 'meticulous', 'metric', 'mgmt', 'micro', 'microbiome', 'microenvironment', 'microservices', 'might', 'migration', 'milestone', 'military', 'million', 'mind', 'mindset', 'mine', 'minimal', 'minimum', 'mining', 'miningSolid', 'minority', 'mission', 'mobile', 'modality', 'model', 'modeling', 'modelling', 'modellingAdvanced', 'moderate', 'moderately', 'modern', 'modified', 'module', 'molecular', 'molecule', 'moment', 'money', 'monitor', 'monitoring', 'month', 'motivated', 'motivation', 'move', 'moving', 'much', 'multi', 'multidisciplinary', 'multiple', 'multitask', 'multitude', 'multivariate', 'music', 'must', 'mutual', 'myriad', 'narrativeMeticulous', 'nation', 'national', 'natural', 'nature', 'navigating', 'naïve', 'near', 'nearly', 'necessarily', 'necessary', 'necessity', 'need', 'needed', 'neededSelecting', 'needing', 'negotiate', 'neighborhood', 'net', 'network', 'neural', 'neuroimaging', 'neuroscience', 'never', 'new', 'newly', 'next', 'nimble', 'noise', 'noncitizen', 'nonpartisan', 'nontechnical', 'normalization', 'normalized', 'normalizing', 'normally', 'note', 'notebook', 'nothing', 'novel', 'nucleic', 'number', 'numerate', 'numerical', 'numerous', 'nurture', 'nurturing', 'ob', 'object', 'objective', 'objectivesMaintain', 'observational', 'observe', 'obstacle', 'obtain', 'obtaining', 'occasional', 'occur', 'offHealth', 'offer', 'offered', 'offering', 'office', 'offline', 'often', 'oil', 'onboard', 'oncology', 'one', 'ongoing', 'online', 'onsite', 'open', 'opening', 'openly', 'openness', 'operate', 'operates', 'operating', 'operation', 'operational', 'operationalize', 'opinion', 'opportunity', 'optical', 'optimal', 'optimally', 'optimization', 'optimize', 'optimized', 'optimizing', 'option', 'oral', 'orchestration', 'orchestrator', 'order', 'ordering', 'orderliness', 'ordinance', 'organization', 'organizationDeliver', 'organizational', 'organize', 'organized', 'organizing', 'orientation', 'oriented', 'origin', 'original', 'others', 'otherwise', 'out', 'outcome', 'outgoing', 
'outlier', 'outlined', 'outlook', 'outperform', 'outside', 'outsourced', 'outstanding', 'overall', 'overarching', 'overcoming', 'overseeing', 'oversight', 'owned', 'ownership', 'owning', 'pace', 'paced', 'package', 'paid', 'paired', 'panda', 'panel', 'paper', 'paradigm', 'parallel', 'parameter', 'parameterization', 'part', 'participant', 'participate', 'participates', 'participating', 'participation', 'particularly', 'partner', 'partnered', 'partnering', 'partnership', 'party', 'passion', 'passionate', 'past', 'path', 'pathology', 'pathway', 'patient', 'pattern', 'pay', 'payer', 'paying', 'payment', 'pedagogical', 'peer', 'penalized', 'people', 'per', 'perception', 'perfect', 'perform', 'performance', 'performanceWorking', 'performed', 'performing', 'period', 'periodically', 'permanent', 'permitted', 'person', 'personal', 'personality', 'personalization', 'personalize', 'personalized', 'personnel', 'perspective', 'pharmaceutical', 'pharmacist', 'pharmacology', 'phase', 'phenotyping', 'phone', 'physic', 'physical', 'physician', 'physiological', 'physiology', 'pick', 'picture', 'piece', 'pilot', 'piloting', 'pioneer', 'pioneering', 'pipeline', 'pivotal', 'place', 'plan', 'planet', 'planning', 'platform', 'play', 'player', 'playing', 'please', 'plot', 'plus', 'plusExperience', 'plusHigh', 'plusProficient', 'plusProven', 'point', 'poised', 'policy', 'political', 'population', 'portal', 'portfolio', 'position', 'positive', 'positively', 'posse', 'possessing', 'possibility', 'possible', 'post', 'postdoc', 'posting', 'potential', 'potentially', 'pound', 'power', 'powered', 'powerful', 'powering', 'practical', 'practice', 'pragmatic', 'preceptor', 'precise', 'predicting', 'prediction', 'predictive', 'preempt', 'pref', 'prefer', 'preferably', 'preference', 'preferred', 'pregnancy', 'preliminary', 'premier', 'preparation', 'prepare', 'prepared', 'preparing', 'preprocessing', 'prescribe', 'prescriptive', 'present', 'presentation', 'presenting', 'presently', 'pressure', 'preventing', 'prevention', 'previous', 'previously', 'price', 'pricing', 'pride', 'primarily', 'primary', 'principle', 'prior', 'prioritize', 'prioritized', 'prioritizing', 'priority', 'privacy', 'private', 'privately', 'proactive', 'proactively', 'probabilistic', 'probability', 'problem', 'procedure', 'proceeding', 'process', 'processed', 'processing', 'produce', 'produced', 'producing', 'product', 'production', 'productionExcellent', 'productionalizing', 'productively', 'productivity', 'profession', 'professional', 'professionally', 'professionnal', 'proficiency', 'proficient', 'proficiently', 'profile', 'profiling', 'profit', 'profitability', 'profitable', 'program', 'programmer', 'programming', 'progress', 'progression', 'progressive', 'prohibits', 'project', 'projection', 'projectsDemonstrates', 'projectsSoftware', 'promote', 'promotes', 'promoting', 'proof', 'property', 'proposal', 'propose', 'proposed', 'proposition', 'proprietary', 'prospect', 'prospecting', 'prosper', 'protected', 'proteomic', 'proteomics', 'protocol', 'prototype', 'prototyping', 'proud', 'proudly', 'proven', 'provide', 'provided', 'provider', 'provides', 'providing', 'proving', 'psychology', 'public', 'publication', 'publicly', 'publish', 'published', 'publishes', 'publishing', 'pull', 'purchase', 'purchasing', 'purpose', 'pursue', 'pursuing', 'pursuit', 'push', 'put', 'python', 'qPCR', 'qualification', 'qualified', 'qualifying', 'qualitative', 'quality', 'quantitative', 'quarterly', 'query', 'querying', 'quest', 'question', 'quick', 'quickly', 'quit', 
'quo', 'rShiny', 'race', 'racial', 'radar', 'random', 'range', 'ranging', 'ranking', 'rapid', 'rapidly', 'rare', 'rate', 'rather', 'rating', 'reach', 'reaction', 'read', 'readable', 'reading', 'reagent', 'real', 'reality', 'realize', 'really', 'reason', 'reasonable', 'reasoning', 'receive', 'received', 'recent', 'recently', 'reciprocally', 'recognition', 'recognize', 'recognized', 'recommend', 'recommendation', 'recommender', 'reconstruction', 'record', 'recordkeeping', 'recovery', 'recruit', 'recruiting', 'recruitment', 'recurring', 'redefine', 'redefining', 'redesigned', 'reduce', 'reducing', 'reduction', 'reference', 'refine', 'refinement', 'reflect', 'reflects', 'regard', 'regarding', 'regardless', 'region', 'regional', 'regression', 'regular', 'regularized', 'regularly', 'regulated', 'regulation', 'regulatory', 'reinforcement', 'reinsurers', 'related', 'relating', 'relational', 'relationship', 'release', 'relentless', 'relevance', 'relevant', 'reliability', 'relies', 'religion', 'religious', 'relocation', 'reltio', 'rely', 'relying', 'remain', 'remaining', 'remote', 'remotely', 'rendered', 'renewable', 'renewal', 'repertoire', 'repetitive', 'replying', 'report', 'reporting', 'repository', 'reposted', 'represent', 'representation', 'representing', 'represents', 'reproduce', 'reproducibility', 'reproducible', 'repurposed', 'request', 'requested', 'require', 'required', 'requirement', 'requires', 'requisition', 'research', 'researcher', 'researching', 'reservist', 'reshape', 'reshaping', 'residence', 'resolution', 'resolve', 'resolving', 'resource', 'resourceful', 'respect', 'respond', 'responding', 'response', 'responsibility', 'responsible', 'rest', 'restore', 'result', 'resultsDesign', 'resume', 'retail', 'retain', 'retaliation', 'retention', 'retrain', 'return', 'reused', 'revenue', 'reversal', 'reverse', 'review', 'reviewed', 'reviewing', 'revolutionize', 'reward', 'rewarding', 'rich', 'richest', 'ride', 'right', 'rigor', 'rigorous', 'risk', 'roadmap', 'robust', 'rockstar', 'role', 'roll', 'room', 'root', 'rounded', 'routine', 'row', 'rule', 'run', 'running', 'runtime', 'rural', 'safe', 'safer', 'safety', 'salary', 'sale', 'sample', 'sampling', 'sanity', 'satellite', 'satisfaction', 'satisfy', 'saving', 'say', 'scalability', 'scalable', 'scale', 'scaling', 'scenario', 'scene', 'schedule', 'schema', 'scholar', 'scholarship', 'school', 'science', 'scientific', 'scientist', 'scope', 'screen', 'screening', 'scripting', 'scrum', 'seamlessly', 'search', 'searchable', 'searching', 'seasoned', 'secondary', 'sector', 'secure', 'security', 'see', 'seeing', 'seek', 'seeker', 'seeking', 'seen', 'segment', 'segmentation', 'segmenting', 'select', 'selected', 'selecting', 'selection', 'sell', 'send', 'senior', 'seniority', 'sense', 'sensing', 'sensitivity', 'sensor', 'sentiment', 'sequencing', 'serf', 'series', 'serious', 'serve', 'service', 'servicing', 'serving', 'set', 'setsKnowledge', 'setting', 'seventeen', 'several', 'sex', 'sexual', 'shape', 'shaping', 'share', 'shared', 'sharing', 'sharp', 'shift', 'ship', 'shipping', 'short', 'shot', 'shower', 'shown', 'shrinking', 'signal', 'signaling', 'significance', 'significant', 'silo', 'similar', 'simple', 'simplest', 'simplicity', 'simplifies', 'simplify', 'simplifying', 'simply', 'simulated', 'simulation', 'simultaneously', 'since', 'single', 'singular', 'sit', 'site', 'sits', 'situation', 'size', 'skill', 'skilled', 'skillsClear', 'skillsTeam', 'sleep', 'sleeve', 'slide', 'small', 'smart', 'smarter', 'social', 'socially', 'sociology', 
'software', 'solid', 'solution', 'solve', 'solved', 'solver', 'solves', 'solving', 'someone', 'something', 'soon', 'sophisticated', 'sophomore', 'sound', 'source', 'sourcing', 'space', 'span', 'spanning', 'speak', 'speaker', 'speaking', 'spearheading', 'spec', 'special', 'specialist', 'specialization', 'specialized', 'specializing', 'specialty', 'specific', 'specifically', 'specification', 'specified', 'specifying', 'speech', 'speed', 'spend', 'spending', 'spirit', 'sponsor', 'sponsorship', 'sport', 'spot', 'spotting', 'spouse', 'spreadsheet', 'stack', 'staff', 'stage', 'stakeholder', 'standard', 'standardized', 'standardizing', 'start', 'started', 'starting', 'startup', 'state', 'stateful', 'statement', 'statistic', 'statistical', 'statistician', 'status', 'stay', 'stayed', 'staying', 'steadfast', 'step', 'stepwise', 'stewardship', 'still', 'stimulating', 'stone', 'storage', 'store', 'story', 'storytelling', 'straightforward', 'strategic', 'strategy', 'stratification', 'streaming', 'streamline', 'streamlining', 'street', 'strength', 'strict', 'strictly', 'stride', 'strive', 'strong', 'strongest', 'strongly', 'structure', 'structured', 'stuck', 'student', 'studied', 'study', 'studying', 'style', 'subject', 'submit', 'substantial', 'substantially', 'subtypes', 'succeed', 'succeeding', 'success', 'successful', 'successfully', 'suffer', 'suggest', 'suggestion', 'sum', 'summarize', 'summarized', 'summarizes', 'summarizing', 'summary', 'summer', 'superior', 'supervised', 'supervising', 'supervision', 'supervisor', 'supplement', 'supplier', 'supply', 'support', 'supported', 'supporting', 'supportive', 'sure', 'surface', 'surprise', 'surrounded', 'survey', 'survival', 'sustainable', 'sustained', 'sustaining', 'switch', 'symbiosis', 'symbolic', 'sync', 'synergy', 'synthesis', 'synthesize', 'synthesized', 'synthetic', 'system', 'systematic', 'systematization', 'systemsExtending', 'table', 'tackle', 'tackled', 'tactic', 'take', 'takeaway', 'taking', 'talent', 'talented', 'talk', 'talking', 'tandem', 'tangible', 'target', 'targeted', 'targeting', 'task', 'tasked', 'teach', 'teachable', 'teaching', 'team', 'teammate', 'teamwork', 'tech', 'technical', 'technique', 'techniquesData', 'technological', 'technologiesPractical', 'technologist', 'technology', 'technologyPrior', 'tedious', 'telecom', 'telephone', 'tell', 'temperature', 'ten', 'tenacious', 'term', 'territory', 'test', 'tested', 'testing', 'text', 'thanks', 'theoretical', 'theory', 'therapeutic', 'therapy', 'thereafter', 'therefore', 'thermodynamics', 'thesis', 'thing', 'think', 'thinker', 'thinking', 'third', 'thirty', 'thorough', 'though', 'thought', 'thoughtful', 'thousand', 'threat', 'three', 'threshold', 'thrive', 'thrives', 'throughout', 'tie', 'tight', 'time', 'timeline', 'timely', 'tissue', 'title', 'tobacco', 'today', 'together', 'tolerance', 'tool', 'toolbox', 'tooling', 'toolkits', 'toolsExperience', 'toolsets', 'top', 'topic', 'total', 'touch', 'toward', 'towards', 'toworking', 'track', 'tracking', 'traded', 'tradeoff', 'traditional', 'traffic', 'train', 'trained', 'trainee', 'training', 'trajectory', 'transaction', 'transfer', 'transform', 'transformation', 'transformative', 'transforming', 'transition', 'translate', 'translating', 'translational', 'transparency', 'traumatic', 'travel', 'travelling', 'treat', 'treatment', 'tree', 'tremendous', 'trend', 'trial', 'triangulate', 'trillion', 'troubleshoot', 'troubleshooting', 'trucking', 'truly', 'trust', 'trusted', 'truth', 'try', 'trying', 'tuberculosis', 'tumor', 'tuning', 'turn', 
'turning', 'turnover', 'two', 'type', 'typical', 'typically', 'typo', 'u', 'ultimate', 'ultimately', 'unblock', 'uncover', 'undergoing', 'undergraduate', 'underlying', 'underpin', 'underpinnings', 'underrepresentation', 'underrepresented', 'understand', 'understanding', 'understands', 'undertake', 'undertaking', 'underway', 'underwriting', 'unexpected', 'unifying', 'unique', 'unit', 'university', 'unlawful', 'unless', 'unlock', 'unlocking', 'unparalleled', 'unprecedented', 'unrelated', 'unrivaled', 'unsolved', 'unstructured', 'unsupervised', 'untapped', 'unturned', 'unusual', 'upcoming', 'updating', 'upfront', 'uphold', 'upkeep', 'upload', 'uploading', 'upon', 'upsell', 'urban', 'urgency', 'urgent', 'us', 'usability', 'usage', 'use', 'used', 'useful', 'user', 'usercentric', 'using', 'utility', 'utilization', 'utilize', 'utilized', 'utilizing', 'utmost', 'valid', 'validate', 'validated', 'validating', 'validation', 'valuable', 'value', 'valued', 'variable', 'variance', 'variation', 'varied', 'variety', 'various', 'vary', 'varying', 'vast', 'vector', 'velocity', 'vendor', 'venture', 'venue', 'verbal', 'verbally', 'verifiable', 'verification', 'verify', 'verifying', 'versed', 'version', 'vertical', 'veteran', 'via', 'vibrant', 'view', 'virtual', 'virtualization', 'virtualized', 'virtually', 'visibility', 'vision', 'visionary', 'visit', 'visual', 'visualization', 'visualize', 'visualizing', 'visually', 'vital', 'vitro', 'voice', 'volatility', 'volume', 'vulnerability', 'waiting', 'wallet', 'want', 'warehouse', 'waste', 'water', 'way', 'weakness', 'weapon', 'wear', 'web', 'weblog', 'website', 'week', 'weekend', 'weight', 'welcome', 'well', 'wellbeing', 'wherever', 'whole', 'whose', 'wide', 'widely', 'wider', 'willing', 'willingness', 'win', 'wind', 'winning', 'within', 'without', 'word', 'work', 'worked', 'workflow', 'workforce', 'working', 'workload', 'workplace', 'workshop', 'workstreams', 'world', 'worldIs', 'worldwide', 'would', 'wrangling', 'write', 'writing', 'written', 'year', 'yes', 'yet', 'yield']
</code>
# 3) Use Scikit-Learn's CountVectorizer to get word counts for each listing._____no_output_____
<code>
df2["Job Description - Most Common"] = df2["Job Description"].apply(lambda v: FreqDist(v).most_common(20))
df2["Job Description - Most Common"]_____no_output_____
</code>
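The heading for this step mentions Scikit-Learn's CountVectorizer, while the cell above uses NLTK's `FreqDist`; below is a minimal CountVectorizer sketch, assuming (as in the surrounding cells) that `df2["Job Description"]` holds the tokenized listings._____no_output_____
<code>
from sklearn.feature_extraction.text import CountVectorizer

# Build a document-term count matrix, one row per listing (English stop words removed)
count_vec = CountVectorizer(stop_words='english')
word_counts = count_vec.fit_transform([" ".join(v) for v in df2["Job Description"].values])
df_counts = pd.DataFrame(word_counts.toarray(), columns=count_vec.get_feature_names())
df_counts.head()_____no_output_____
</code>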
# 4) Visualize the most common word counts_____no_output_____
<code>
import matplotlib.pyplot as plt
fdist = FreqDist([inner for outer in df2["Job Description"].values for inner in outer])
fdist.plot(30, cumulative=False)
plt.show()_____no_output_____
</code>
# 5) Use Scikit-Learn's tfidfVectorizer to get a TF-IDF feature matrix_____no_output_____
<code>
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
vectorizer = CountVectorizer(stop_words='english')
tfidf = TfidfVectorizer(ngram_range=(1,1), max_features=20)
bag_of_words = tfidf.fit_transform([" ".join(v) for v in df2["Job Description"].values])
df_vec = pd.DataFrame(bag_of_words.toarray(), columns=tfidf.get_feature_names())
df_vec.head()_____no_output_____
</code>
## Stretch Goals
- Scrape Job Listings for the job title "Data Analyst". How do these differ from Data Scientist Job Listings?
- Try to identify requirements for experience with specific technologies that are asked for in the job listings. How are those distributed among the job listings?
- Use a clustering algorithm to cluster documents by their most important terms (see the sketch below). Do the clusters reveal any common themes?
- **Hint:** K-means might not be the best algorithm for this. Do a little bit of research to see what might be good for this. Also, remember that algorithms that depend on Euclidean distance break down with high dimensional data._____no_output_____
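A rough sketch of one way to start the clustering stretch goal, assuming the TF-IDF matrix `bag_of_words` and the fitted `tfidf` vectorizer from the cell above. KMeans is used here purely for illustration; as the hint notes, other algorithms may suit high-dimensional text better._____no_output_____
<code>
from sklearn.cluster import KMeans

# Cluster listings on their TF-IDF vectors; the number of clusters is an arbitrary choice
km = KMeans(n_clusters=5, random_state=42)
cluster_labels = km.fit_predict(bag_of_words)

# Show the highest-weight terms for each cluster centroid
terms = tfidf.get_feature_names()
for c, center in enumerate(km.cluster_centers_):
    top_terms = [terms[i] for i in center.argsort()[::-1][:5]]
    print("Cluster", c, ":", top_terms)_____no_output_____
</code>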
| {
"repository": "brit228/AB-Demo",
"path": "module2-Bag-of-Words/LS_DS_422_BOW_Assignment.ipynb",
"matched_keywords": [
"RNA-seq"
],
"stars": null,
"size": 234255,
"hexsha": "d085e8eef8be78a17dc0e37742d8abf61fd6dc86",
"max_line_length": 51729,
"avg_line_length": 100.8850129199,
"alphanum_fraction": 0.5850888988
} |
# Notebook from ShepherdCode/ShepherdML
Path: Workshop/GRU_236.ipynb
# GRU 236
* Operate on 16000 GenCode 34 seqs.
* 5-way cross validation. Save best model per CV.
* Report mean accuracy from final re-validation with best 5.
* Use Adam with a learning-rate decay schedule._____no_output_____
<code>
NC_FILENAME='ncRNA.gc34.processed.fasta'
PC_FILENAME='pcRNA.gc34.processed.fasta'
DATAPATH=""
try:
from google.colab import drive
IN_COLAB = True
PATH='/content/drive/'
drive.mount(PATH)
DATAPATH=PATH+'My Drive/data/' # must end in "/"
NC_FILENAME = DATAPATH+NC_FILENAME
PC_FILENAME = DATAPATH+PC_FILENAME
except:
IN_COLAB = False
DATAPATH=""
EPOCHS=200
SPLITS=5
K=3
VOCABULARY_SIZE=4**K+1 # e.g. K=3 => 64 DNA K-mers + 'NNN'
EMBED_DIMEN=16
FILENAME='GRU236'
NEURONS=64
ACT="tanh"
DROP=0.5
Mounted at /content/drive/
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import ShuffleSplit
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import RepeatedKFold
from sklearn.model_selection import StratifiedKFold
import tensorflow as tf
from tensorflow import keras
from keras.wrappers.scikit_learn import KerasRegressor
from keras.models import Sequential
from keras.layers import Bidirectional
from keras.layers import GRU
from keras.layers import Dense
from keras.layers import LayerNormalization
import time
dt='float32'
tf.keras.backend.set_floatx(dt)_____no_output_____
</code>
## Build model_____no_output_____
<code>
def compile_model(model):
adam_default_learn_rate = 0.001
schedule = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate = adam_default_learn_rate*10,
#decay_steps=100000, decay_rate=0.96, staircase=True)
decay_steps=10000, decay_rate=0.99, staircase=True)
# learn rate = initial_learning_rate * decay_rate ^ (step / decay_steps)
alrd = tf.keras.optimizers.Adam(learning_rate=schedule)
bc=tf.keras.losses.BinaryCrossentropy(from_logits=False)
print("COMPILE...")
#model.compile(loss=bc, optimizer=alrd, metrics=["accuracy"])
model.compile(loss=bc, optimizer="adam", metrics=["accuracy"])
print("...COMPILED")
return model
def build_model():
embed_layer = keras.layers.Embedding(
#VOCABULARY_SIZE, EMBED_DIMEN, input_length=1000, input_length=1000, mask_zero=True)
#input_dim=[None,VOCABULARY_SIZE], output_dim=EMBED_DIMEN, mask_zero=True)
input_dim=VOCABULARY_SIZE, output_dim=EMBED_DIMEN, mask_zero=True)
#rnn1_layer = keras.layers.Bidirectional(
rnn1_layer = keras.layers.GRU(NEURONS, return_sequences=True,
input_shape=[1000,EMBED_DIMEN], activation=ACT, dropout=DROP)#)#bi
#rnn2_layer = keras.layers.Bidirectional(
rnn2_layer = keras.layers.GRU(NEURONS, return_sequences=False,
activation=ACT, dropout=DROP)#)#bi
dense1_layer = keras.layers.Dense(NEURONS, activation=ACT,dtype=dt)
#drop1_layer = keras.layers.Dropout(DROP)
dense2_layer = keras.layers.Dense(NEURONS, activation=ACT,dtype=dt)
#drop2_layer = keras.layers.Dropout(DROP)
output_layer = keras.layers.Dense(1, activation="sigmoid", dtype=dt)
mlp = keras.models.Sequential()
mlp.add(embed_layer)
mlp.add(rnn1_layer)
mlp.add(rnn2_layer)
mlp.add(dense1_layer)
#mlp.add(drop1_layer)
mlp.add(dense2_layer)
#mlp.add(drop2_layer)
mlp.add(output_layer)
mlpc = compile_model(mlp)
return mlpc_____no_output_____
</code>
## Load and partition sequences_____no_output_____
<code>
# Assume file was preprocessed to contain one line per seq.
# Prefer Pandas dataframe but df does not support append.
# For conversion to tensor, must avoid python lists.
def load_fasta(filename,label):
DEFLINE='>'
labels=[]
seqs=[]
lens=[]
nums=[]
num=0
with open (filename,'r') as infile:
for line in infile:
if line[0]!=DEFLINE:
seq=line.rstrip()
num += 1 # first seqnum is 1
seqlen=len(seq)
nums.append(num)
labels.append(label)
seqs.append(seq)
lens.append(seqlen)
df1=pd.DataFrame(nums,columns=['seqnum'])
df2=pd.DataFrame(labels,columns=['class'])
df3=pd.DataFrame(seqs,columns=['sequence'])
df4=pd.DataFrame(lens,columns=['seqlen'])
df=pd.concat((df1,df2,df3,df4),axis=1)
return df
def separate_X_and_y(data):
y= data[['class']].copy()
X= data.drop(columns=['class','seqnum','seqlen'])
return (X,y)
_____no_output_____
</code>
## Make K-mers_____no_output_____
<code>
def make_kmer_table(K):
npad='N'*K
shorter_kmers=['']
for i in range(K):
longer_kmers=[]
for mer in shorter_kmers:
longer_kmers.append(mer+'A')
longer_kmers.append(mer+'C')
longer_kmers.append(mer+'G')
longer_kmers.append(mer+'T')
shorter_kmers = longer_kmers
all_kmers = shorter_kmers
kmer_dict = {}
kmer_dict[npad]=0
value=1
for mer in all_kmers:
kmer_dict[mer]=value
value += 1
return kmer_dict
KMER_TABLE=make_kmer_table(K)
def strings_to_vectors(data,uniform_len):
all_seqs=[]
for seq in data['sequence']:
i=0
seqlen=len(seq)
kmers=[]
while i < seqlen-K+1 -1: # stop at minus one for spaced seed
#kmer=seq[i:i+2]+seq[i+3:i+5] # SPACED SEED 2/1/2 for K=4
kmer=seq[i:i+K]
i += 1
value=KMER_TABLE[kmer]
kmers.append(value)
pad_val=0
while i < uniform_len:
kmers.append(pad_val)
i += 1
all_seqs.append(kmers)
pd2d=pd.DataFrame(all_seqs)
return pd2d # return 2D dataframe, uniform dimensions_____no_output_____def make_kmers(MAXLEN,train_set):
(X_train_all,y_train_all)=separate_X_and_y(train_set)
X_train_kmers=strings_to_vectors(X_train_all,MAXLEN)
# From pandas dataframe to numpy to list to numpy
num_seqs=len(X_train_kmers)
tmp_seqs=[]
for i in range(num_seqs):
kmer_sequence=X_train_kmers.iloc[i]
tmp_seqs.append(kmer_sequence)
X_train_kmers=np.array(tmp_seqs)
tmp_seqs=None
labels=y_train_all.to_numpy()
return (X_train_kmers,labels)_____no_output_____def make_frequencies(Xin):
Xout=[]
VOCABULARY_SIZE= 4**K + 1 # plus one for 'NNN'
for seq in Xin:
freqs =[0] * VOCABULARY_SIZE
total = 0
for kmerval in seq:
freqs[kmerval] += 1
total += 1
for c in range(VOCABULARY_SIZE):
freqs[c] = freqs[c]/total
Xout.append(freqs)
Xnum = np.asarray(Xout)
return (Xnum)
def make_slice(data_set,min_len,max_len):
slice = data_set.query('seqlen <= '+str(max_len)+' & seqlen>= '+str(min_len))
return slice_____no_output_____
</code>
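A tiny sanity check of the K-mer encoding defined above, using a made-up toy sequence (values depend on the K=3 `KMER_TABLE` built earlier; padding fills the row out to the requested uniform length)._____no_output_____
<code>
# Toy example: encode one short made-up sequence and pad it to length 8
toy = pd.DataFrame({'sequence': ['ACGTACGT']})
print(strings_to_vectors(toy, 8))_____no_output_____
</code>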
## Cross validation_____no_output_____
<code>
def do_cross_validation(X,y,given_model):
cv_scores = []
fold=0
splitter = ShuffleSplit(n_splits=SPLITS, test_size=0.1) #, random_state=37863)
for train_index,valid_index in splitter.split(X):
fold += 1
X_train=X[train_index] # use iloc[] for dataframe
y_train=y[train_index]
X_valid=X[valid_index]
y_valid=y[valid_index]
# Avoid continually improving the same model.
model = compile_model(keras.models.clone_model(given_model))
bestname=DATAPATH+FILENAME+".cv."+str(fold)+".best"
mycallbacks = [keras.callbacks.ModelCheckpoint(
filepath=bestname, save_best_only=True,
monitor='val_accuracy', mode='max')]
print("FIT")
start_time=time.time()
history=model.fit(X_train, y_train, # batch_size=10, default=32 works nicely
epochs=EPOCHS, verbose=1, # verbose=1 for ascii art, verbose=0 for none
callbacks=mycallbacks,
validation_data=(X_valid,y_valid) )
end_time=time.time()
elapsed_time=(end_time-start_time)
print("Fold %d, %d epochs, %d sec"%(fold,EPOCHS,elapsed_time))
pd.DataFrame(history.history).plot(figsize=(8,5))
plt.grid(True)
plt.gca().set_ylim(0,1)
plt.show()
best_model=keras.models.load_model(bestname)
scores = best_model.evaluate(X_valid, y_valid, verbose=0)
print("%s: %.2f%%" % (best_model.metrics_names[1], scores[1]*100))
cv_scores.append(scores[1] * 100)
print()
print("%d-way Cross Validation mean %.2f%% (+/- %.2f%%)" % (fold, np.mean(cv_scores), np.std(cv_scores)))_____no_output_____
</code>
## Train on RNA lengths 200-1Kb_____no_output_____
<code>
MINLEN=200
MAXLEN=1000
print("Load data from files.")
nc_seq=load_fasta(NC_FILENAME,0)
pc_seq=load_fasta(PC_FILENAME,1)
train_set=pd.concat((nc_seq,pc_seq),axis=0)
nc_seq=None
pc_seq=None
print("Ready: train_set")
#train_set
subset=make_slice(train_set,MINLEN,MAXLEN)# One array to two: X and y
print ("Data reshape")
(X_train,y_train)=make_kmers(MAXLEN,subset)
#print ("Data prep")
#X_train=make_frequencies(X_train)Load data from files.
Ready: train_set
Data reshape
print ("Compile the model")
model=build_model()
print ("Summarize the model")
print(model.summary()) # Print this only once
model.save(DATAPATH+FILENAME+'.model')
Compile the model
COMPILE...
...COMPILED
Summarize the model
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
embedding (Embedding) (None, None, 16) 1040
_________________________________________________________________
gru (GRU) (None, None, 64) 15744
_________________________________________________________________
gru_1 (GRU) (None, 64) 24960
_________________________________________________________________
dense (Dense) (None, 64) 4160
_________________________________________________________________
dense_1 (Dense) (None, 64) 4160
_________________________________________________________________
dense_2 (Dense) (None, 1) 65
=================================================================
Total params: 50,129
Trainable params: 50,129
Non-trainable params: 0
_________________________________________________________________
None
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.
Instructions for updating:
This property should not be used in TensorFlow 2.0, as updates are applied automatically.
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.
Instructions for updating:
This property should not be used in TensorFlow 2.0, as updates are applied automatically.
INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.model/assets
print ("Cross valiation")
do_cross_validation(X_train,y_train,model)
print ("Done")Cross valiation
COMPILE...
...COMPILED
FIT
Epoch 1/200
453/453 [==============================] - ETA: 0s - loss: 0.6361 - accuracy: 0.6476INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 52s 114ms/step - loss: 0.6361 - accuracy: 0.6476 - val_loss: 0.6820 - val_accuracy: 0.4444
Epoch 2/200
453/453 [==============================] - ETA: 0s - loss: 0.6361 - accuracy: 0.6537INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.6361 - accuracy: 0.6537 - val_loss: 0.6581 - val_accuracy: 0.6344
Epoch 3/200
453/453 [==============================] - ETA: 0s - loss: 0.6074 - accuracy: 0.6732INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 107ms/step - loss: 0.6074 - accuracy: 0.6732 - val_loss: 0.5766 - val_accuracy: 0.7207
Epoch 4/200
453/453 [==============================] - ETA: 0s - loss: 0.4915 - accuracy: 0.7705INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 49s 107ms/step - loss: 0.4915 - accuracy: 0.7705 - val_loss: 0.4512 - val_accuracy: 0.7958
Epoch 5/200
453/453 [==============================] - ETA: 0s - loss: 0.4407 - accuracy: 0.8014INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.4407 - accuracy: 0.8014 - val_loss: 0.4440 - val_accuracy: 0.8076
Epoch 6/200
453/453 [==============================] - 33s 72ms/step - loss: 0.4270 - accuracy: 0.8065 - val_loss: 0.4494 - val_accuracy: 0.7827
Epoch 7/200
453/453 [==============================] - ETA: 0s - loss: 0.4182 - accuracy: 0.8125INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.4182 - accuracy: 0.8125 - val_loss: 0.3862 - val_accuracy: 0.8268
Epoch 8/200
453/453 [==============================] - 33s 73ms/step - loss: 0.4056 - accuracy: 0.8207 - val_loss: 0.3965 - val_accuracy: 0.8237
Epoch 9/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3990 - accuracy: 0.8232 - val_loss: 0.3983 - val_accuracy: 0.8206
Epoch 10/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3949 - accuracy: 0.8276 - val_loss: 0.4184 - val_accuracy: 0.8094
Epoch 11/200
453/453 [==============================] - ETA: 0s - loss: 0.3903 - accuracy: 0.8263INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 49s 108ms/step - loss: 0.3903 - accuracy: 0.8263 - val_loss: 0.3728 - val_accuracy: 0.8312
Epoch 12/200
453/453 [==============================] - ETA: 0s - loss: 0.3849 - accuracy: 0.8271INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 49s 107ms/step - loss: 0.3849 - accuracy: 0.8271 - val_loss: 0.3765 - val_accuracy: 0.8324
Epoch 13/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3824 - accuracy: 0.8290 - val_loss: 0.3780 - val_accuracy: 0.8305
Epoch 14/200
453/453 [==============================] - ETA: 0s - loss: 0.3787 - accuracy: 0.8349INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 49s 107ms/step - loss: 0.3787 - accuracy: 0.8349 - val_loss: 0.3623 - val_accuracy: 0.8367
Epoch 15/200
453/453 [==============================] - ETA: 0s - loss: 0.3752 - accuracy: 0.8352INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 105ms/step - loss: 0.3752 - accuracy: 0.8352 - val_loss: 0.3562 - val_accuracy: 0.8448
Epoch 16/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3752 - accuracy: 0.8323 - val_loss: 0.3541 - val_accuracy: 0.8430
Epoch 17/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3685 - accuracy: 0.8353 - val_loss: 0.4017 - val_accuracy: 0.8125
Epoch 18/200
453/453 [==============================] - ETA: 0s - loss: 0.3584 - accuracy: 0.8450INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 107ms/step - loss: 0.3584 - accuracy: 0.8450 - val_loss: 0.3387 - val_accuracy: 0.8529
Epoch 19/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3498 - accuracy: 0.8490 - val_loss: 0.3511 - val_accuracy: 0.8479
Epoch 20/200
453/453 [==============================] - ETA: 0s - loss: 0.3440 - accuracy: 0.8515INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 105ms/step - loss: 0.3440 - accuracy: 0.8515 - val_loss: 0.3271 - val_accuracy: 0.8628
Epoch 21/200
453/453 [==============================] - ETA: 0s - loss: 0.3305 - accuracy: 0.8592INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 47s 105ms/step - loss: 0.3305 - accuracy: 0.8592 - val_loss: 0.3013 - val_accuracy: 0.8715
Epoch 22/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3209 - accuracy: 0.8655 - val_loss: 0.3096 - val_accuracy: 0.8715
Epoch 23/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3288 - accuracy: 0.8620 - val_loss: 0.3177 - val_accuracy: 0.8547
Epoch 24/200
453/453 [==============================] - ETA: 0s - loss: 0.3139 - accuracy: 0.8687INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.3139 - accuracy: 0.8687 - val_loss: 0.3002 - val_accuracy: 0.8790
Epoch 25/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3052 - accuracy: 0.8733 - val_loss: 0.4355 - val_accuracy: 0.7896
Epoch 26/200
453/453 [==============================] - 33s 73ms/step - loss: 0.4059 - accuracy: 0.8172 - val_loss: 0.3680 - val_accuracy: 0.8417
Epoch 27/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3964 - accuracy: 0.8258 - val_loss: 0.3708 - val_accuracy: 0.8367
Epoch 28/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3360 - accuracy: 0.8547 - val_loss: 0.3075 - val_accuracy: 0.8746
Epoch 29/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3511 - accuracy: 0.8504 - val_loss: 0.3812 - val_accuracy: 0.8367
Epoch 30/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3702 - accuracy: 0.8400 - val_loss: 0.3504 - val_accuracy: 0.8510
Epoch 31/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3399 - accuracy: 0.8569 - val_loss: 0.3377 - val_accuracy: 0.8560
Epoch 32/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3298 - accuracy: 0.8584 - val_loss: 0.3280 - val_accuracy: 0.8610
Epoch 33/200
453/453 [==============================] - ETA: 0s - loss: 0.3089 - accuracy: 0.8731INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 47s 105ms/step - loss: 0.3089 - accuracy: 0.8731 - val_loss: 0.2890 - val_accuracy: 0.8895
Epoch 34/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2939 - accuracy: 0.8825 - val_loss: 0.2793 - val_accuracy: 0.8895
Epoch 35/200
453/453 [==============================] - ETA: 0s - loss: 0.2943 - accuracy: 0.8837INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.2943 - accuracy: 0.8837 - val_loss: 0.2592 - val_accuracy: 0.9032
Epoch 36/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2844 - accuracy: 0.8875 - val_loss: 0.3216 - val_accuracy: 0.8727
Epoch 37/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3080 - accuracy: 0.8728 - val_loss: 0.2746 - val_accuracy: 0.8920
Epoch 38/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2825 - accuracy: 0.8893 - val_loss: 0.2770 - val_accuracy: 0.8945
Epoch 39/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3002 - accuracy: 0.8799 - val_loss: 0.2753 - val_accuracy: 0.8870
Epoch 40/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3032 - accuracy: 0.8760 - val_loss: 0.3113 - val_accuracy: 0.8678
Epoch 41/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3115 - accuracy: 0.8727 - val_loss: 0.2946 - val_accuracy: 0.8759
Epoch 42/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3031 - accuracy: 0.8763 - val_loss: 0.2835 - val_accuracy: 0.8821
Epoch 43/200
453/453 [==============================] - ETA: 0s - loss: 0.2934 - accuracy: 0.8837INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 47s 105ms/step - loss: 0.2934 - accuracy: 0.8837 - val_loss: 0.2572 - val_accuracy: 0.9094
Epoch 44/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2627 - accuracy: 0.8993 - val_loss: 0.2596 - val_accuracy: 0.9038
Epoch 45/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3563 - accuracy: 0.8519 - val_loss: 0.3042 - val_accuracy: 0.8727
Epoch 46/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2846 - accuracy: 0.8870 - val_loss: 0.2589 - val_accuracy: 0.9044
Epoch 47/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2705 - accuracy: 0.8946 - val_loss: 0.2459 - val_accuracy: 0.9088
Epoch 48/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2997 - accuracy: 0.8766 - val_loss: 0.2931 - val_accuracy: 0.8808
Epoch 49/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2876 - accuracy: 0.8814 - val_loss: 0.2637 - val_accuracy: 0.8988
Epoch 50/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2964 - accuracy: 0.8779 - val_loss: 0.2561 - val_accuracy: 0.9050
Epoch 51/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2800 - accuracy: 0.8887 - val_loss: 0.2525 - val_accuracy: 0.9056
Epoch 52/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2562 - accuracy: 0.8995 - val_loss: 0.2413 - val_accuracy: 0.9088
Epoch 53/200
453/453 [==============================] - ETA: 0s - loss: 0.2444 - accuracy: 0.9054INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 47s 104ms/step - loss: 0.2444 - accuracy: 0.9054 - val_loss: 0.2289 - val_accuracy: 0.9143
Epoch 54/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3011 - accuracy: 0.8773 - val_loss: 0.3016 - val_accuracy: 0.8790
Epoch 55/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3377 - accuracy: 0.8550 - val_loss: 0.3758 - val_accuracy: 0.8206
Epoch 56/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2664 - accuracy: 0.8914 - val_loss: 0.2604 - val_accuracy: 0.8932
Epoch 57/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2692 - accuracy: 0.8956 - val_loss: 0.2441 - val_accuracy: 0.9032
Epoch 58/200
453/453 [==============================] - ETA: 0s - loss: 0.2466 - accuracy: 0.9035INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 47s 103ms/step - loss: 0.2466 - accuracy: 0.9035 - val_loss: 0.2407 - val_accuracy: 0.9156
Epoch 59/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2770 - accuracy: 0.8887 - val_loss: 0.3081 - val_accuracy: 0.8684
Epoch 60/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3007 - accuracy: 0.8770 - val_loss: 0.2347 - val_accuracy: 0.9112
Epoch 61/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2542 - accuracy: 0.8995 - val_loss: 0.2328 - val_accuracy: 0.9156
Epoch 62/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2549 - accuracy: 0.9013 - val_loss: 0.2530 - val_accuracy: 0.8994
Epoch 63/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2479 - accuracy: 0.9072 - val_loss: 0.2357 - val_accuracy: 0.9137
Epoch 64/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2474 - accuracy: 0.9055 - val_loss: 0.2781 - val_accuracy: 0.8901
Epoch 65/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2715 - accuracy: 0.8914 - val_loss: 0.2851 - val_accuracy: 0.8845
Epoch 66/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3346 - accuracy: 0.8547 - val_loss: 0.3348 - val_accuracy: 0.8579
Epoch 67/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3540 - accuracy: 0.8488 - val_loss: 0.3053 - val_accuracy: 0.8777
Epoch 68/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3332 - accuracy: 0.8615 - val_loss: 0.3003 - val_accuracy: 0.8802
Epoch 69/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3233 - accuracy: 0.8657 - val_loss: 0.2952 - val_accuracy: 0.8808
Epoch 70/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3114 - accuracy: 0.8715 - val_loss: 0.2855 - val_accuracy: 0.8821
Epoch 71/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3067 - accuracy: 0.8719 - val_loss: 0.3043 - val_accuracy: 0.8759
Epoch 72/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3077 - accuracy: 0.8741 - val_loss: 0.2769 - val_accuracy: 0.8920
Epoch 73/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2914 - accuracy: 0.8808 - val_loss: 0.2751 - val_accuracy: 0.8994
Epoch 74/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2850 - accuracy: 0.8830 - val_loss: 0.2705 - val_accuracy: 0.8951
Epoch 75/200
453/453 [==============================] - ETA: 0s - loss: 0.2546 - accuracy: 0.8995INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 47s 104ms/step - loss: 0.2546 - accuracy: 0.8995 - val_loss: 0.2307 - val_accuracy: 0.9187
Epoch 76/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2371 - accuracy: 0.9081 - val_loss: 0.2644 - val_accuracy: 0.8870
Epoch 77/200
453/453 [==============================] - ETA: 0s - loss: 0.2303 - accuracy: 0.9131INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.2303 - accuracy: 0.9131 - val_loss: 0.2201 - val_accuracy: 0.9236
Epoch 78/200
453/453 [==============================] - ETA: 0s - loss: 0.2203 - accuracy: 0.9173INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.2203 - accuracy: 0.9173 - val_loss: 0.2126 - val_accuracy: 0.9274
Epoch 79/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2226 - accuracy: 0.9149 - val_loss: 0.2206 - val_accuracy: 0.9224
Epoch 80/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2166 - accuracy: 0.9188 - val_loss: 0.2090 - val_accuracy: 0.9230
Epoch 81/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2085 - accuracy: 0.9211 - val_loss: 0.2209 - val_accuracy: 0.9218
Epoch 82/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2060 - accuracy: 0.9228 - val_loss: 0.2061 - val_accuracy: 0.9261
Epoch 83/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2011 - accuracy: 0.9273 - val_loss: 0.2266 - val_accuracy: 0.9150
Epoch 84/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2049 - accuracy: 0.9247 - val_loss: 0.2163 - val_accuracy: 0.9205
Epoch 85/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2038 - accuracy: 0.9238 - val_loss: 0.2144 - val_accuracy: 0.9249
Epoch 86/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2003 - accuracy: 0.9262 - val_loss: 0.2374 - val_accuracy: 0.9174
Epoch 87/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2012 - accuracy: 0.9253 - val_loss: 0.2187 - val_accuracy: 0.9156
Epoch 88/200
453/453 [==============================] - 33s 72ms/step - loss: 0.1979 - accuracy: 0.9265 - val_loss: 0.2169 - val_accuracy: 0.9236
Epoch 89/200
453/453 [==============================] - ETA: 0s - loss: 0.2017 - accuracy: 0.9260INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 48s 106ms/step - loss: 0.2017 - accuracy: 0.9260 - val_loss: 0.2008 - val_accuracy: 0.9280
Epoch 90/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3862 - accuracy: 0.8240 - val_loss: 0.4660 - val_accuracy: 0.7834
Epoch 91/200
453/453 [==============================] - 33s 73ms/step - loss: 0.4274 - accuracy: 0.8015 - val_loss: 0.4335 - val_accuracy: 0.8026
Epoch 92/200
453/453 [==============================] - 32s 71ms/step - loss: 0.4130 - accuracy: 0.8116 - val_loss: 0.4077 - val_accuracy: 0.8187
Epoch 93/200
453/453 [==============================] - 33s 73ms/step - loss: 0.4022 - accuracy: 0.8199 - val_loss: 0.3873 - val_accuracy: 0.8231
Epoch 94/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3994 - accuracy: 0.8193 - val_loss: 0.4020 - val_accuracy: 0.8138
Epoch 95/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3977 - accuracy: 0.8217 - val_loss: 0.4060 - val_accuracy: 0.8150
Epoch 96/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3861 - accuracy: 0.8283 - val_loss: 0.3699 - val_accuracy: 0.8367
Epoch 97/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3870 - accuracy: 0.8236 - val_loss: 0.3630 - val_accuracy: 0.8386
Epoch 98/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3778 - accuracy: 0.8354 - val_loss: 0.3748 - val_accuracy: 0.8256
Epoch 99/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3815 - accuracy: 0.8291 - val_loss: 0.3540 - val_accuracy: 0.8442
Epoch 100/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3721 - accuracy: 0.8351 - val_loss: 0.3656 - val_accuracy: 0.8330
Epoch 101/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3634 - accuracy: 0.8425 - val_loss: 0.3476 - val_accuracy: 0.8436
Epoch 102/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3709 - accuracy: 0.8338 - val_loss: 0.3624 - val_accuracy: 0.8417
Epoch 103/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3678 - accuracy: 0.8390 - val_loss: 0.3582 - val_accuracy: 0.8430
Epoch 104/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3641 - accuracy: 0.8413 - val_loss: 0.3494 - val_accuracy: 0.8467
Epoch 105/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3594 - accuracy: 0.8419 - val_loss: 0.3977 - val_accuracy: 0.8045
Epoch 106/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3557 - accuracy: 0.8452 - val_loss: 0.3947 - val_accuracy: 0.8268
Epoch 107/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3435 - accuracy: 0.8503 - val_loss: 0.3266 - val_accuracy: 0.8591
Epoch 108/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3333 - accuracy: 0.8559 - val_loss: 0.3238 - val_accuracy: 0.8603
Epoch 109/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3276 - accuracy: 0.8606 - val_loss: 0.3368 - val_accuracy: 0.8541
Epoch 110/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3140 - accuracy: 0.8667 - val_loss: 0.3181 - val_accuracy: 0.8647
Epoch 111/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2941 - accuracy: 0.8804 - val_loss: 0.2946 - val_accuracy: 0.8808
Epoch 112/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3340 - accuracy: 0.8541 - val_loss: 0.3607 - val_accuracy: 0.8479
Epoch 113/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3407 - accuracy: 0.8520 - val_loss: 0.3399 - val_accuracy: 0.8566
Epoch 114/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3227 - accuracy: 0.8621 - val_loss: 0.3099 - val_accuracy: 0.8740
Epoch 115/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2947 - accuracy: 0.8771 - val_loss: 0.3164 - val_accuracy: 0.8709
Epoch 116/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3010 - accuracy: 0.8762 - val_loss: 0.3231 - val_accuracy: 0.8572
Epoch 117/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3172 - accuracy: 0.8633 - val_loss: 0.4079 - val_accuracy: 0.8150
Epoch 118/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3056 - accuracy: 0.8742 - val_loss: 0.2704 - val_accuracy: 0.8982
Epoch 119/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2652 - accuracy: 0.8950 - val_loss: 0.2657 - val_accuracy: 0.8988
Epoch 120/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2660 - accuracy: 0.8925 - val_loss: 0.2614 - val_accuracy: 0.8957
Epoch 121/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2624 - accuracy: 0.8943 - val_loss: 0.2864 - val_accuracy: 0.8945
Epoch 122/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2593 - accuracy: 0.8961 - val_loss: 0.2685 - val_accuracy: 0.8970
Epoch 123/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2563 - accuracy: 0.8982 - val_loss: 0.2534 - val_accuracy: 0.9050
Epoch 124/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2522 - accuracy: 0.8997 - val_loss: 0.3743 - val_accuracy: 0.8386
Epoch 125/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3451 - accuracy: 0.8499 - val_loss: 0.3622 - val_accuracy: 0.8405
Epoch 126/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3437 - accuracy: 0.8478 - val_loss: 0.3452 - val_accuracy: 0.8504
Epoch 127/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3498 - accuracy: 0.8479 - val_loss: 0.3665 - val_accuracy: 0.8442
Epoch 128/200
453/453 [==============================] - 33s 73ms/step - loss: 0.3387 - accuracy: 0.8529 - val_loss: 0.3811 - val_accuracy: 0.8330
Epoch 129/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3332 - accuracy: 0.8557 - val_loss: 0.3629 - val_accuracy: 0.8461
Epoch 130/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3311 - accuracy: 0.8564 - val_loss: 0.3955 - val_accuracy: 0.8250
Epoch 131/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3238 - accuracy: 0.8600 - val_loss: 0.3997 - val_accuracy: 0.8206
Epoch 132/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3182 - accuracy: 0.8659 - val_loss: 0.3415 - val_accuracy: 0.8516
Epoch 133/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2993 - accuracy: 0.8743 - val_loss: 0.3430 - val_accuracy: 0.8591
Epoch 134/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2918 - accuracy: 0.8814 - val_loss: 0.3175 - val_accuracy: 0.8690
Epoch 135/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2807 - accuracy: 0.8859 - val_loss: 0.2961 - val_accuracy: 0.8696
Epoch 136/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2600 - accuracy: 0.8949 - val_loss: 0.3006 - val_accuracy: 0.8709
Epoch 137/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2596 - accuracy: 0.8937 - val_loss: 0.2792 - val_accuracy: 0.8839
Epoch 138/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2749 - accuracy: 0.8899 - val_loss: 0.3704 - val_accuracy: 0.8516
Epoch 139/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2994 - accuracy: 0.8744 - val_loss: 0.3465 - val_accuracy: 0.8653
Epoch 140/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2747 - accuracy: 0.8885 - val_loss: 0.3200 - val_accuracy: 0.8709
Epoch 141/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2746 - accuracy: 0.8887 - val_loss: 0.2957 - val_accuracy: 0.8703
Epoch 142/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2660 - accuracy: 0.8937 - val_loss: 0.2784 - val_accuracy: 0.8858
Epoch 143/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2864 - accuracy: 0.8824 - val_loss: 0.3955 - val_accuracy: 0.8163
Epoch 144/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2751 - accuracy: 0.8898 - val_loss: 0.2637 - val_accuracy: 0.9007
Epoch 145/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2642 - accuracy: 0.8946 - val_loss: 0.3197 - val_accuracy: 0.8690
Epoch 146/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3362 - accuracy: 0.8566 - val_loss: 0.4848 - val_accuracy: 0.7747
Epoch 147/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3824 - accuracy: 0.8305 - val_loss: 0.4688 - val_accuracy: 0.7877
Epoch 148/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3676 - accuracy: 0.8376 - val_loss: 0.4588 - val_accuracy: 0.7921
Epoch 149/200
453/453 [==============================] - 32s 70ms/step - loss: 0.3640 - accuracy: 0.8393 - val_loss: 0.4436 - val_accuracy: 0.8020
Epoch 150/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3555 - accuracy: 0.8442 - val_loss: 0.4453 - val_accuracy: 0.8014
Epoch 151/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3463 - accuracy: 0.8488 - val_loss: 0.4561 - val_accuracy: 0.8001
Epoch 152/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3436 - accuracy: 0.8518 - val_loss: 0.4243 - val_accuracy: 0.8088
Epoch 153/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3349 - accuracy: 0.8537 - val_loss: 0.4339 - val_accuracy: 0.8038
Epoch 154/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3358 - accuracy: 0.8547 - val_loss: 0.3917 - val_accuracy: 0.8293
Epoch 155/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3231 - accuracy: 0.8607 - val_loss: 0.4251 - val_accuracy: 0.8101
Epoch 156/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3087 - accuracy: 0.8675 - val_loss: 0.3938 - val_accuracy: 0.8256
Epoch 157/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2943 - accuracy: 0.8757 - val_loss: 0.3869 - val_accuracy: 0.8423
Epoch 158/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2853 - accuracy: 0.8818 - val_loss: 0.3463 - val_accuracy: 0.8628
Epoch 159/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2929 - accuracy: 0.8802 - val_loss: 0.3623 - val_accuracy: 0.8597
Epoch 160/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2717 - accuracy: 0.8900 - val_loss: 0.3340 - val_accuracy: 0.8709
Epoch 161/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2902 - accuracy: 0.8796 - val_loss: 0.3680 - val_accuracy: 0.8634
Epoch 162/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2825 - accuracy: 0.8853 - val_loss: 0.2839 - val_accuracy: 0.8895
Epoch 163/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2530 - accuracy: 0.9017 - val_loss: 0.3017 - val_accuracy: 0.8870
Epoch 164/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2376 - accuracy: 0.9073 - val_loss: 0.2877 - val_accuracy: 0.8982
Epoch 165/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2342 - accuracy: 0.9077 - val_loss: 0.2999 - val_accuracy: 0.8908
Epoch 166/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2280 - accuracy: 0.9124 - val_loss: 0.2905 - val_accuracy: 0.8963
Epoch 167/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2210 - accuracy: 0.9169 - val_loss: 0.2462 - val_accuracy: 0.9131
Epoch 168/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2076 - accuracy: 0.9211 - val_loss: 0.2616 - val_accuracy: 0.9075
Epoch 169/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2130 - accuracy: 0.9213 - val_loss: 0.2492 - val_accuracy: 0.9162
Epoch 170/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2037 - accuracy: 0.9237 - val_loss: 0.2282 - val_accuracy: 0.9199
Epoch 171/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2079 - accuracy: 0.9197 - val_loss: 0.2770 - val_accuracy: 0.9038
Epoch 172/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2092 - accuracy: 0.9212 - val_loss: 0.2486 - val_accuracy: 0.9094
Epoch 173/200
453/453 [==============================] - 33s 73ms/step - loss: 0.2102 - accuracy: 0.9200 - val_loss: 0.2252 - val_accuracy: 0.9187
Epoch 174/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2043 - accuracy: 0.9215 - val_loss: 0.2357 - val_accuracy: 0.9181
Epoch 175/200
453/453 [==============================] - 33s 72ms/step - loss: 0.1917 - accuracy: 0.9293 - val_loss: 0.2591 - val_accuracy: 0.9032
Epoch 176/200
453/453 [==============================] - 33s 72ms/step - loss: 0.1895 - accuracy: 0.9301 - val_loss: 0.2720 - val_accuracy: 0.9007
Epoch 177/200
453/453 [==============================] - 33s 72ms/step - loss: 0.1861 - accuracy: 0.9320 - val_loss: 0.2412 - val_accuracy: 0.9205
Epoch 178/200
453/453 [==============================] - 33s 72ms/step - loss: 0.1812 - accuracy: 0.9339 - val_loss: 0.2353 - val_accuracy: 0.9187
Epoch 179/200
453/453 [==============================] - 32s 72ms/step - loss: 0.1829 - accuracy: 0.9316 - val_loss: 0.2412 - val_accuracy: 0.9199
Epoch 180/200
453/453 [==============================] - 32s 71ms/step - loss: 0.1752 - accuracy: 0.9332 - val_loss: 0.2251 - val_accuracy: 0.9218
Epoch 181/200
453/453 [==============================] - 33s 72ms/step - loss: 0.1743 - accuracy: 0.9372 - val_loss: 0.2271 - val_accuracy: 0.9199
Epoch 182/200
453/453 [==============================] - ETA: 0s - loss: 0.1752 - accuracy: 0.9351INFO:tensorflow:Assets written to: /content/drive/My Drive/data/GRU215r.cv.1.best/assets
453/453 [==============================] - 49s 107ms/step - loss: 0.1752 - accuracy: 0.9351 - val_loss: 0.2098 - val_accuracy: 0.9286
Epoch 183/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2744 - accuracy: 0.8864 - val_loss: 0.3062 - val_accuracy: 0.8814
Epoch 184/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3304 - accuracy: 0.8568 - val_loss: 0.4067 - val_accuracy: 0.8250
Epoch 185/200
453/453 [==============================] - 33s 72ms/step - loss: 0.3265 - accuracy: 0.8588 - val_loss: 0.3853 - val_accuracy: 0.8417
Epoch 186/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3194 - accuracy: 0.8590 - val_loss: 0.3962 - val_accuracy: 0.8293
Epoch 187/200
453/453 [==============================] - 32s 71ms/step - loss: 0.3101 - accuracy: 0.8684 - val_loss: 0.3825 - val_accuracy: 0.8405
Epoch 188/200
453/453 [==============================] - 32s 72ms/step - loss: 0.3002 - accuracy: 0.8739 - val_loss: 0.3746 - val_accuracy: 0.8485
Epoch 189/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2937 - accuracy: 0.8748 - val_loss: 0.3289 - val_accuracy: 0.8665
Epoch 190/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2769 - accuracy: 0.8846 - val_loss: 0.3285 - val_accuracy: 0.8659
Epoch 191/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2501 - accuracy: 0.8985 - val_loss: 0.2631 - val_accuracy: 0.8926
Epoch 192/200
453/453 [==============================] - 32s 72ms/step - loss: 0.2349 - accuracy: 0.9080 - val_loss: 0.2345 - val_accuracy: 0.9119
Epoch 193/200
453/453 [==============================] - 33s 74ms/step - loss: 0.1986 - accuracy: 0.9244 - val_loss: 0.2372 - val_accuracy: 0.9081
Epoch 194/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2235 - accuracy: 0.9142 - val_loss: 0.3134 - val_accuracy: 0.8790
Epoch 195/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2421 - accuracy: 0.9040 - val_loss: 0.2502 - val_accuracy: 0.9001
Epoch 196/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2250 - accuracy: 0.9146 - val_loss: 0.2778 - val_accuracy: 0.9001
Epoch 197/200
453/453 [==============================] - 32s 70ms/step - loss: 0.2504 - accuracy: 0.8997 - val_loss: 0.3098 - val_accuracy: 0.8752
Epoch 198/200
453/453 [==============================] - 32s 71ms/step - loss: 0.2767 - accuracy: 0.8859 - val_loss: 0.2939 - val_accuracy: 0.8845
Epoch 199/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2452 - accuracy: 0.9000 - val_loss: 0.3176 - val_accuracy: 0.8746
Epoch 200/200
453/453 [==============================] - 33s 72ms/step - loss: 0.2610 - accuracy: 0.8920 - val_loss: 0.2798 - val_accuracy: 0.8852
Fold 1, 200 epochs, 6883 sec
_____no_output_____
</code>
| {
"repository": "ShepherdCode/ShepherdML",
"path": "Workshop/GRU_236.ipynb",
"matched_keywords": [
"RNA"
],
"stars": null,
"size": 476642,
"hexsha": "d086407af685fc6351fa88c1c340d078f5476e9f",
"max_line_length": 56822,
"avg_line_length": 171.3923049263,
"alphanum_fraction": 0.7272145552
} |
# Notebook from jlmciver/LSSTC-DSFP-Sessions
Path: Session9/Day4/Matched_filter_tutorial.ipynb
# Welcome to the matched filtering tutorial!
### Installation
Make sure you have PyCBC and some basic lalsuite tools installed. You can do this in a terminal with pip:_____no_output_____
<code>
! pip install lalsuite pycbc_____no_output_____
</code>
<span style="color:gray">Jess notes: this notebook was made with a PyCBC 1.8.0 kernel. </span>
### Learning goals
With this tutorial, you learn how to:
* Generate source waveforms detectable by LIGO, Virgo, KAGRA
* Use PyCBC to run a matched filter search on gravitational wave detector data
* Estimate the significance of a trigger given a background distribution
* **Challenge**: Code up a trigger coincidence algorithm
This tutorial borrows heavily from tutorials made for the [LIGO-Virgo Open Data Workshop](https://www.gw-openscience.org/static/workshop1/course.html) by Alex Nitz. You can find PyCBC documentation and additional examples [here](http://pycbc.org/pycbc/latest/html/py-modindex.html).
Let's get started!
________no_output_____## Generate a gravitational wave signal waveform
We'll use a popular waveform approximant ([SEOBNRv4](https://arxiv.org/pdf/1611.03703.pdf)) to generate waveforms that would be detectable by LIGO, Virgo, or KAGRA.
First we import the packages we'll need. _____no_output_____
<code>
from pycbc.waveform import get_td_waveform
import pylab_____no_output_____
</code>
Let's see what these waveforms look like for different component masses. We'll assume the two compact objects have equal masses, and we'll set a lower frequency bound of 30 Hz (determined by the sensitivity of our detectors).
We can also set a time sample rate with `get_td_waveform`. Let's try a rate of 4096 Hz.
Let's make a plot of the plus polarization (`hp`) to get a feel for what the waveforms look like._____no_output_____
<code>
for m in [5, 10, 30, 100]:
hp, hc = get_td_waveform(approximant="SEOBNRv4_opt",
mass1=m,
mass2=m,
delta_t=1.0/4096,
f_lower=30)
pylab.plot(hp.sample_times, hp, label='$M_{\odot 1,2}=%s$' % m)
pylab.legend(loc='upper left')
pylab.ylabel('GW strain (plus polarization)')
pylab.grid()
pylab.xlabel('Time (s)')
pylab.show()_____no_output_____
</code>
Now let's see what happens if we decrease the lower frequency bound from 30 Hz to 15 Hz. _____no_output_____
<code>
for m in [5, 10, 30, 100]:
hp, hc = get_td_waveform(approximant="SEOBNRv4_opt",
mass1=m,
mass2=m,
delta_t=1.0/4096,
f_lower=15)
pylab.plot(hp.sample_times, hp, label='$M_{\odot 1,2}=%s$' % m)
pylab.legend(loc='upper left')
pylab.ylabel('GW strain (plus polarization)')
pylab.grid()
pylab.xlabel('Time (s)')
pylab.show()_____no_output_____
</code>
---
### Exercise 1
What happens to the waveform when the total mass (let's say 20 M<sub>sol</sub>) stays the same, but the mass ratio between the component masses changes?
Compare the waveforms for a m<sub>1</sub> = m<sub>2</sub> = 10 M<sub>sol</sub> system, and a m<sub>1</sub> = 2 M<sub>sol</sub>, m<sub>2</sub> = 18 M<sub>sol</sub> system. What do you notice?
_____no_output_____
<code>
# complete _____no_output_____
</code>
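One possible way to complete this exercise, reusing the same `get_td_waveform` call as above; the plotting choices are just for illustration._____no_output_____
<code>
# Same total mass (20 Msol), different mass ratios
for m1, m2 in [(10, 10), (2, 18)]:
    hp, hc = get_td_waveform(approximant="SEOBNRv4_opt",
                             mass1=m1,
                             mass2=m2,
                             delta_t=1.0/4096,
                             f_lower=30)
    pylab.plot(hp.sample_times, hp, label='m1=%s, m2=%s' % (m1, m2))
pylab.legend(loc='upper left')
pylab.ylabel('GW strain (plus polarization)')
pylab.xlabel('Time (s)')
pylab.grid()
pylab.show()_____no_output_____
</code>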
### Exercise 2
How much longer (in signal duration) would LIGO and Virgo (and KAGRA) be able to detect a 1.4-1.4 M<sub>sol</sub> binary neutron star system if our detectors were sensitive down to 10 Hz instead of 30 Hz? **Note: you'll need to use a different waveform approximant here. Try TaylorF2.**
<span style="color:gray">Jess notes: this would be a major benefit of next-generation ("3G") ground-based gravitational wave detectors.</span>_____no_output_____
<code>
# complete _____no_output_____
</code>
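If generating the full TaylorF2 waveform is awkward in your PyCBC version, a leading-order (Newtonian) chirp-time estimate already answers the scaling question. This sketch uses only numpy and approximate physical constants, so treat the numbers as rough and cross-check them against the waveform-based answer._____no_output_____
<code>
import numpy as np

# Leading-order chirp time: t ~ (5/256) * (pi*f_low)**(-8/3) * (G*Mchirp/c**3)**(-5/3)
G, c, Msun = 6.674e-11, 2.998e8, 1.989e30   # SI units (approximate)
m1 = m2 = 1.4 * Msun
mchirp = (m1 * m2)**0.6 / (m1 + m2)**0.2
for f_low in [30.0, 10.0]:
    t_chirp = (5.0/256.0) * (np.pi * f_low)**(-8.0/3.0) * (G * mchirp / c**3)**(-5.0/3.0)
    print("From %d Hz the signal lasts roughly %.0f s" % (f_low, t_chirp))_____no_output_____
</code>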
---
### Distance vs. signal amplitude
Let's see what happens when we scale the distance (in units of Megaparsecs) for a system with a total mass of 20 M<sub>sol</sub>.
<span style="color:gray">Note: redshift effects are not included here.</span>_____no_output_____
<code>
for d in [100, 500, 1000]:
hp, hc = get_td_waveform(approximant="SEOBNRv4_opt",
mass1=10,
mass2=10,
delta_t=1.0/4096,
f_lower=30,
distance=d)
pylab.plot(hp.sample_times, hp, label='Distance=%s Mpc' % d)
pylab.grid()
pylab.xlabel('Time (s)')
pylab.ylabel('GW strain (plus polarization)')
pylab.legend(loc='upper left')
pylab.show()_____no_output_____
</code>
---
## Run a matched filter search on gravitational wave detector data
PyCBC also maintains a catalog of open data as PyCBC time series objects, easy to manipulate with PyCBC tools. Let's try using that and importing the data around the first detection, GW150914.
_____no_output_____
<code>
import pylab
from pycbc.catalog import Merger
from pycbc.filter import resample_to_delta_t, highpass
merger = Merger("GW150914")
# Get the data from the Hanford detector
strain = merger.strain('H1')_____no_output_____
</code>
### Data pre-conditioning
Once we've imported the open data from this alternate source, the first thing we'll need to do is **pre-condition** the data. This serves a few purposes:
* 1) reduces the dynamic range of the data
* 2) suppresses high amplitudes at low frequencies, which can introduce numerical artifacts
* 3) if we don't need high frequency information, downsampling allows us to compute our matched filter result faster
Let's try highpassing above 15 Hz and downsampling to 2048 Hz, and we'll make a plot to see what the result looks like:_____no_output_____
<code>
# Remove the low frequency content and downsample the data to 2048Hz
strain = resample_to_delta_t(highpass(strain, 15.0), 1.0/2048)
pylab.plot(strain.sample_times, strain)
pylab.xlabel('Time (s)')_____no_output_____
</code>
Notice the large amplitude excursions in the data at the start and end of our data segment. This is **spectral leakage**: the filters we applied ring off the discontinuities where the data suddenly starts and ends, corrupting the boundaries for a time up to the length of the filter.
To avoid this we should trim the ends of the data in all steps of our filtering. Let's try cropping a couple seconds off of either side. _____no_output_____
<code>
# Remove 2 seconds of data from both the beginning and end
conditioned = strain.crop(2, 2)
pylab.plot(conditioned.sample_times, conditioned)
pylab.xlabel('Time (s)')_____no_output_____
</code>
That's better.
### Calculating the spectral density of the data
Optimal matched filtering requires *whitening*; weighting the frequency components of the potential signal and data by the estimated noise amplitude.
Let's compute the power spectral density (PSD) of our conditioned data.
_____no_output_____
<code>
from pycbc.psd import interpolate, inverse_spectrum_truncation
# Estimate the power spectral density
# We use 4-second segments of our time series in the Welch method.
psd = conditioned.psd(4)
# Now that we have the psd we need to interpolate it to match our data
# and then limit the filter length of 1 / PSD. After this, we can
# directly use this PSD to filter the data in a controlled manner
psd = interpolate(psd, conditioned.delta_f)
# 1/PSD will now act as a filter with an effective length of 4 seconds
# Since the data has been highpassed above 15 Hz and will have low values
# below this, we need to inform the function not to include frequencies
# below this cutoff.
psd = inverse_spectrum_truncation(psd, 4 * conditioned.sample_rate,
low_frequency_cutoff=15)_____no_output_____
</code>
----
### Define a signal model
Recall that matched filtering is essentially integrating the inner product between your data and your signal model in frequency or time (after weighting frequencies correctly) as you slide your signal model over your data in time.
If there is a signal in the data that matches your 'template', we will see a large value of this inner product (the SNR, or 'signal to noise ratio') at that time.
In a full search, we would grid over the parameters and calculate the SNR time series for each template in our template bank.
Here we'll define just one template. Let's assume equal masses (which is within the posterior probability of GW150914). Because we want to match our signal model with each time sample in our data, let's also rescale our signal model vector to match the same number of time samples as our data vector (**<- very important!**).
Let's also plot the output to see what it looks like. _____no_output_____
<code>
m = 36 # Solar masses
hp, hc = get_td_waveform(approximant="SEOBNRv4_opt",
mass1=m,
mass2=m,
delta_t=conditioned.delta_t,
f_lower=20)
# We should resize the vector of our template to match our data
hp.resize(len(conditioned))
pylab.plot(hp)
pylab.xlabel('Time samples')_____no_output_____
</code>
Note that the waveform template currently begins at the start of the vector. However, we want our SNR time series (the inner product between our data and our template) to track with the approximate merger time. To do this, we need to shift our template so that the merger is approximately at the first bin of the data.
For this reason, waveforms returned from `get_td_waveform` have their merger stamped with time zero, so we can easily shift the merger into the right position to compute our SNR time series.
Let's try shifting our template time and plot the output. _____no_output_____
<code>
template = hp.cyclic_time_shift(hp.start_time)
pylab.plot(template)
pylab.xlabel('Time samples')_____no_output_____
</code>
---
### Calculate an SNR time series
Now that we've pre-conditioned our data and defined a signal model, we can compute the output of our matched filter search. _____no_output_____
<code>
from pycbc.filter import matched_filter
import numpy
snr = matched_filter(template, conditioned,
psd=psd, low_frequency_cutoff=20)
pylab.figure(figsize=[10, 4])
pylab.plot(snr.sample_times, abs(snr))
pylab.xlabel('Time (s)')
pylab.ylabel('SNR')_____no_output_____
</code>
Note that as we expect, there is some corruption at the start and end of our SNR time series by the template filter and the PSD filter.
To account for this, we can smoothly zero out 4 seconds (the length of the PSD filter) at the beginning and end for the PSD filtering.
We should remove an additional 4 seconds at the beginning to account for the template length, although this is somewhat generous for so short a template. A longer signal, such as from a BNS, would require much more padding at the beginning of the vector._____no_output_____
<code>
snr = snr.crop(4 + 4, 4)
pylab.figure(figsize=[10, 4])
pylab.plot(snr.sample_times, abs(snr))
pylab.ylabel('Signal-to-noise')
pylab.xlabel('Time (s)')
pylab.show()_____no_output_____
</code>
Finally, now that the output is properly cropped, we can find the peak of our SNR time series and estimate the merger time and associated SNR of any event candidate within the data. _____no_output_____
<code>
peak = abs(snr).numpy().argmax()
snrp = snr[peak]
time = snr.sample_times[peak]
print("We found a signal at {}s with SNR {}".format(time,
abs(snrp)))We found a signal at 1126259462.42s with SNR 19.6770890131
</code>
You found the first gravitational wave detection in LIGO Hanford data! Nice work.
---
### Exercise 3
How does the SNR change if you re-compute the matched filter result using a signal model with component masses that are closer to the current estimates for GW150914, say m<sub>1</sub> = 36 M<sub>sol</sub> and m<sub>2</sub> = 31 M<sub>sol</sub>?
_____no_output_____
<code>
# complete_____no_output_____
</code>
### Exercise 4
**Network SNR** is the quadrature sum of the single-detector SNR from each contributing detector. GW150914 was detected by H1 and L1. Try calculating the network SNR (you'll need to estimate the SNR in L1 first), and compare your answer to the network PyCBC SNR as reported in the [GWTC-1 catalog](https://arxiv.org/abs/1811.12907)._____no_output_____
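Written out for the two-detector case considered here, the quadrature sum is
$$
\rho_{\mathrm{net}} = \sqrt{\rho_{\mathrm{H1}}^2 + \rho_{\mathrm{L1}}^2}.
$$_____no_output_____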
<code>
# complete _____no_output_____
</code>
---
## Estimate the single-detector significance of an event candidate
Great, we found a large spike in SNR! What are the chances this is a real astrophysical signal? How often would detector noise produce this by chance?
Let's plot a histogram of SNR values output by our matched filtering analysis for this time and see how much this trigger stands out.
_____no_output_____
<code>
# import what we need
from scipy.stats import norm
from math import pi
from math import exp
# make a histogram of SNR values
background = (abs(snr))
# plot the histogram to check out any other outliers
pylab.hist(background, bins=50)
pylab.xlabel('SNR')
pylab.semilogy()
# use norm.fit to fit a normal (Gaussian) distribution
(mu, sigma) = norm.fit(background)
# print out the mean and standard deviation of the fit
print('The fit mean = %f and the fit std dev = %f' % (mu, sigma))
The fit mean = 1.295883 and the fit std dev = 0.739471
</code>
### Exercise 5
At what single-detector SNR is the significance of a trigger > 5 sigma?
Remember that sigma is constant for a normal distribution (read: this should be simple multiplication now that we have estimated what 1 sigma is). _____no_output_____
<code>
# complete _____no_output_____
</code>
---
## Challenge
Our matched filter analysis assumes the noise is *stationary* and *Gaussian*, which is not a good assumption, and this short data set isn't representative of all the various things that can go bump in the detector (remember the phone?).
**The simple significance estimate above won't work as soon as we encounter a glitch!** We need a better noise background estimate, and we can leverage our detector network to help make our signals stand out from our background.
Observing a gravitational wave signal between detectors is an important cross-check to minimize the impact of transient detector noise. Our strategy:
* We look for loud triggers within a time window to identify foreground events that occur within the gravitational wave travel time (v=c) between detectors, but could come from any sky position.
* We use time slides to estimate the noise background for a network of detectors.
If you still have time, try coding up an algorithm that checks for time coincidence between triggers in different detectors. Remember that the maximum gravitational wave travel time between LIGO detectors is ~10 ms. Check your code with the GPS times for the H1 and L1 triggers you identified for GW150914. _____no_output_____
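If you want a starting point, below is a minimal sketch (not a reference solution) of such a coincidence check. The trigger lists are placeholders: the H1 time is the peak found above, and the L1 time is a hypothetical value to replace with your own result._____no_output_____
<code>
# A minimal sketch of a time-coincidence check between two detectors.
# These trigger times are placeholders: the H1 value is the peak found above,
# and the L1 value is hypothetical -- replace both with your own measurements.
h1_triggers = [1126259462.42]   # GPS times (s) of H1 triggers
l1_triggers = [1126259462.41]   # GPS times (s) of L1 triggers (placeholder)

max_travel_time = 0.010  # ~10 ms maximum gravitational wave travel time between the LIGO sites

coincident_pairs = []
for t_h1 in h1_triggers:
    for t_l1 in l1_triggers:
        if abs(t_h1 - t_l1) <= max_travel_time:
            coincident_pairs.append((t_h1, t_l1))

print("Coincident H1/L1 trigger pairs:", coincident_pairs)_____no_output_____
</code>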
<code>
# complete if time _____no_output_____
</code>
| {
"repository": "jlmciver/LSSTC-DSFP-Sessions",
"path": "Session9/Day4/Matched_filter_tutorial.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 201946,
"hexsha": "d087f03e4eec7c06970dbc425f5b009ae79a1421",
"max_line_length": 44332,
"avg_line_length": 251.8029925187,
"alphanum_fraction": 0.9202460064
} |
# Notebook from jvictor42/astropy-tutorials
Path: tutorials/color-excess/color-excess.ipynb
# Analyzing interstellar reddening and calculating synthetic photometry_____no_output_____## Authors
Kristen Larson, Lia Corrales, Stephanie T. Douglas, Kelle Cruz
Input from Emir Karamehmetoglu, Pey Lian Lim, Karl Gordon, Kevin Covey_____no_output_____## Learning Goals
- Investigate extinction curve shapes
- Deredden spectral energy distributions and spectra
- Calculate photometric extinction and reddening
- Calculate synthetic photometry for a dust-reddened star by combining `dust_extinction` and `synphot`
- Convert from frequency to wavelength with `astropy.unit` equivalencies
- Unit support for plotting with `astropy.visualization`
## Keywords
dust extinction, synphot, astroquery, units, photometry, extinction, physics, observational astronomy
## Companion Content
* [Bessell & Murphy (2012)](https://ui.adsabs.harvard.edu/#abs/2012PASP..124..140B/abstract)
## Summary
In this tutorial, we will look at some extinction curves from the literature, use one of those curves to deredden an observed spectrum, and practice invoking a background source flux in order to calculate magnitudes from an extinction model.
The primary libraries we'll be using are [dust_extinction](https://dust-extinction.readthedocs.io/en/latest/) and [synphot](https://synphot.readthedocs.io/en/latest/), which are [Astropy affiliated packages](https://www.astropy.org/affiliated/).
We recommend installing the two packages in this fashion:
```
pip install synphot
pip install dust_extinction
```
This tutorial requires v0.7 or later of `dust_extinction`. To ensure that all commands work properly, make sure you have the correct version installed. If you have v0.6 or earlier installed, run the following command to upgrade
```
pip install dust_extinction --upgrade
```_____no_output_____
<code>
import pathlib
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import astropy.units as u
from astropy.table import Table
from dust_extinction.parameter_averages import CCM89, F99
from synphot import units, config
from synphot import SourceSpectrum,SpectralElement,Observation,ExtinctionModel1D
from synphot.models import BlackBodyNorm1D
from synphot.spectrum import BaseUnitlessSpectrum
from synphot.reddening import ExtinctionCurve
from astroquery.simbad import Simbad
from astroquery.mast import Observations
import astropy.visualization_____no_output_____
</code>
# Introduction_____no_output_____Dust in the interstellar medium (ISM) extinguishes background starlight. The wavelength dependence of the extinction is such that short-wavelength light is extinguished more than long-wavelength light, and we call this effect *reddening*.
If you're new to extinction, here is a brief introduction to the types of quantities involved.
The fractional change to the flux of starlight is
$$
\frac{dF_\lambda}{F_\lambda} = -\tau_\lambda
$$
where $\tau$ is the optical depth and depends on wavelength. Integrating along the line of sight, the resultant flux is an exponential function of optical depth,
$$
\tau_\lambda = -\ln\left(\frac{F_\lambda}{F_{\lambda,0}}\right).
$$
With an eye to how we define magnitudes, we usually change the base from $e$ to 10,
$$
\tau_\lambda = -2.303\log\left(\frac{F_\lambda}{F_{\lambda,0}}\right),
$$
and define an extinction $A_\lambda = 1.086 \,\tau_\lambda$ so that
$$
A_\lambda = -2.5\log\left(\frac{F_\lambda}{F_{\lambda,0}}\right).
$$
There are two basic take-home messages from this derivation:
* Extinction introduces a multiplying factor $10^{-0.4 A_\lambda}$ to the flux.
* Extinction is defined relative to the flux without dust, $F_{\lambda,0}$.
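For a quick sense of scale, $A_\lambda = 1$ mag corresponds to a flux factor of $10^{-0.4} \approx 0.4$, i.e. only about 40% of the background starlight makes it through the dust.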
_____no_output_____Once astropy and the affiliated packages are installed, we can import from them as needed:_____no_output_____# Example 1: Investigate Extinction Models_____no_output_____The `dust_extinction` package provides various models for extinction $A_\lambda$ normalized to $A_V$. The shapes of normalized curves are relatively (and perhaps surprisingly) uniform in the Milky Way. The little variation that exists is often parameterized by the ratio of extinction ($A_V$) to reddening in the blue-visual ($E_{B-V}$),
$$
R_V \equiv \frac{A_V}{E_{B-V}}
$$
where $E_{B-V}$ is the differential extinction $A_B-A_V$. In this example, we show the $R_V$-parameterization for the Cardelli, Clayton, & Mathis (1989, CCM) and the Fitzpatrick (1999) models. [More model options are available in the `dust_extinction` documentation.](https://dust-extinction.readthedocs.io/en/latest/dust_extinction/model_flavors.html)_____no_output_____
<code>
# Create wavelengths array.
wav = np.arange(0.1, 3.0, 0.001)*u.micron
for model in [CCM89, F99]:
for R in (2.0,3.0,4.0):
# Initialize the extinction model
ext = model(Rv=R)
plt.plot(1/wav, ext(wav), label=model.name+' R='+str(R))
plt.xlabel('$\lambda^{-1}$ ($\mu$m$^{-1}$)')
plt.ylabel('A($\lambda$) / A(V)')
plt.legend(loc='best')
plt.title('Some Extinction Laws')
plt.show()_____no_output_____
</code>
Astronomers studying the ISM often display extinction curves against inverse wavelength (wavenumber) to show the ultraviolet variation, as we do here. Infrared extinction varies much less and approaches zero at long wavelength in the absence of wavelength-independent, or grey, extinction._____no_output_____# Example 2: Deredden a Spectrum_____no_output_____Here we deredden (unextinguish) the IUE ultraviolet spectrum and optical photometry of the star $\rho$ Oph (HD 147933).
First, we will use astroquery to fetch the archival [IUE spectrum from MAST](https://archive.stsci.edu/iue/):_____no_output_____
<code>
download_dir = pathlib.Path('~/.astropy/cache/astroquery/Mast').expanduser()
download_dir.mkdir(exist_ok=True)
obsTable = Observations.query_object("HD 147933", radius="1 arcsec")
obsTable_spec = obsTable[obsTable['dataproduct_type'] == 'spectrum']
obsTable_spec_____no_output_____obsids = obsTable_spec[39]['obsid']
dataProductsByID = Observations.get_product_list(obsids)
manifest = Observations.download_products(dataProductsByID,
download_dir=str(download_dir))_____no_output_____
</code>
We read the downloaded files into an astropy table:_____no_output_____
<code>
t_lwr = Table.read(download_dir / 'mastDownload/IUE/lwr05639/lwr05639mxlo_vo.fits')
print(t_lwr)_____no_output_____
</code>
The `.quantity` extension in the next lines will read the Table columns into Quantity vectors. Quantities keep the units of the Table column attached to the numpy array values._____no_output_____
<code>
wav_UV = t_lwr['WAVE'][0,].quantity
UVflux = t_lwr['FLUX'][0,].quantity_____no_output_____
</code>
Now, we use astroquery again to fetch photometry from Simbad to go with the IUE spectrum:_____no_output_____
<code>
custom_query = Simbad()
custom_query.add_votable_fields('fluxdata(U)','fluxdata(B)','fluxdata(V)')
phot_table=custom_query.query_object('HD 147933')
Umag=phot_table['FLUX_U']
Bmag=phot_table['FLUX_B']
Vmag=phot_table['FLUX_V']_____no_output_____
</code>
To convert the photometry to flux, we look up some [properties of the photometric passbands](http://ned.ipac.caltech.edu/help/photoband.lst), including the flux of a magnitude zero star through each passband, also known as the zero-point of the passband._____no_output_____
<code>
wav_U = 0.3660 * u.micron
zeroflux_U_nu = 1.81E-23 * u.Watt/(u.m*u.m*u.Hz)
wav_B = 0.4400 * u.micron
zeroflux_B_nu = 4.26E-23 * u.Watt/(u.m*u.m*u.Hz)
wav_V = 0.5530 * u.micron
zeroflux_V_nu = 3.64E-23 * u.Watt/(u.m*u.m*u.Hz)_____no_output_____
</code>
The zero-points that we found for the optical passbands are not in the same units as the IUE fluxes. To make matters worse, the zero-point fluxes are $F_\nu$ and the IUE fluxes are $F_\lambda$. To convert between them, the wavelength is needed. Fortunately, astropy provides an easy way to make the conversion with *equivalencies*:_____no_output_____
<code>
zeroflux_U = zeroflux_U_nu.to(u.erg/u.AA/u.cm/u.cm/u.s,
equivalencies=u.spectral_density(wav_U))
zeroflux_B = zeroflux_B_nu.to(u.erg/u.AA/u.cm/u.cm/u.s,
equivalencies=u.spectral_density(wav_B))
zeroflux_V = zeroflux_V_nu.to(u.erg/u.AA/u.cm/u.cm/u.s,
equivalencies=u.spectral_density(wav_V))_____no_output_____
</code>
Now we can convert from photometry to flux using the definition of magnitude:
$$
F=F_0\ 10^{-0.4\, m}
$$_____no_output_____
<code>
Uflux = zeroflux_U * 10.**(-0.4*Umag)
Bflux = zeroflux_B * 10.**(-0.4*Bmag)
Vflux = zeroflux_V * 10.**(-0.4*Vmag)_____no_output_____
</code>
Using astropy quantities allows us to take advantage of astropy's unit support in plotting. [Calling `astropy.visualization.quantity_support` explicitly turns the feature on.](http://docs.astropy.org/en/stable/units/quantity.html#plotting-quantities) Then, when quantity objects are passed to matplotlib plotting functions, the axis labels are automatically labeled with the unit of the quantity. In addition, quantities are converted automatically into the same units when combining multiple plots on the same axes.
_____no_output_____
<code>
astropy.visualization.quantity_support()
plt.plot(wav_UV,UVflux,'m',label='UV')
plt.plot(wav_V,Vflux,'ko',label='U, B, V')
plt.plot(wav_B,Bflux,'ko')
plt.plot(wav_U,Uflux,'ko')
plt.legend(loc='best')
plt.ylim(0,3E-10)
plt.title('rho Oph')
plt.show()_____no_output_____
</code>
Finally, we initialize the extinction model, choosing values $R_V = 5$ and $E_{B-V} = 0.5$. This star is famous in the ISM community for having large-$R_V$ dust in the line of sight._____no_output_____
<code>
Rv = 5.0 # Usually around 3, but about 5 for this star.
Ebv = 0.5
ext = F99(Rv=Rv)_____no_output_____
</code>
To extinguish (redden) a spectrum, multiply by the `ext.extinguish` function. To unextinguish (deredden), divide by the same `ext.extinguish`, as we do here:_____no_output_____
<code>
plt.semilogy(wav_UV,UVflux,'m',label='UV')
plt.semilogy(wav_V,Vflux,'ko',label='U, B, V')
plt.semilogy(wav_B,Bflux,'ko')
plt.semilogy(wav_U,Uflux,'ko')
plt.semilogy(wav_UV,UVflux/ext.extinguish(wav_UV,Ebv=Ebv),'b',
label='dereddened: EBV=0.5, RV=5')
plt.semilogy(wav_V,Vflux/ext.extinguish(wav_V,Ebv=Ebv),'ro',
label='dereddened: EBV=0.5, RV=5')
plt.semilogy(wav_B,Bflux/ext.extinguish(wav_B,Ebv=Ebv),'ro')
plt.semilogy(wav_U,Uflux/ext.extinguish(wav_U,Ebv=Ebv),'ro')
plt.legend(loc='best')
plt.title('rho Oph')
plt.show()_____no_output_____
</code>
Notice that, by dereddening the spectrum, the absorption feature at 2175 Angstrom is removed. This feature can also be seen as the prominent bump in the extinction curves in Example 1. That we have smoothly removed the 2175 Angstrom feature suggests that the values we chose, $R_V = 5$ and $E_{B-V} = 0.5$, are a reasonable model for the foreground dust.
Those experienced with dereddening should notice that `dust_extinction` returns $A_\lambda/A_V$, while other routines like the IDL fm_unred procedure often return $A_\lambda/E_{B-V}$ by default and need to be divided by $R_V$ in order to compare directly with `dust_extinction`._____no_output_____# Example 3: Calculate Color Excess with `synphot`_____no_output_____Calculating broadband *photometric* extinction is harder than it might look at first. All we have to do is look up $A_\lambda$ for a particular passband, right? Under the right conditions, yes. In general, no.
Remember that we have to integrate over a passband to get synthetic photometry,
$$
A = -2.5\log\left(\frac{\int W_\lambda F_{\lambda,0} 10^{-0.4A_\lambda} d\lambda}{\int W_\lambda F_{\lambda,0} d\lambda} \right),
$$
where $W_\lambda$ is the fraction of incident energy transmitted through a filter. See the detailed appendix in [Bessell & Murphy (2012)](https://ui.adsabs.harvard.edu/#abs/2012PASP..124..140B/abstract)
for an excellent review of the issues and common misunderstandings in synthetic photometry.
There is an important point to be made here. The expression above does not simplify any further. Strictly speaking, it is impossible to convert spectral extinction $A_\lambda$ into a magnitude system without knowing the wavelength dependence of the source's original flux across the filter in question. As a special case, if we assume that the source flux is constant in the band (i.e. $F_\lambda = F$), then we can cancel these factors out from the integrals, and extinction in magnitudes becomes the weighted average of the extinction factor across the filter in question. In that special case, $A_\lambda$ at $\lambda_{\rm eff}$ is a good approximation for magnitude extinction.
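In that constant-flux limit the expression above reduces to
$$
A \approx -2.5\log\left(\frac{\int W_\lambda\, 10^{-0.4 A_\lambda}\, d\lambda}{\int W_\lambda\, d\lambda}\right).
$$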
In this example, we will demonstrate the more general calculation of photometric extinction. We use a blackbody curve for the flux before the dust, apply an extinction curve, and perform synthetic photometry to calculate extinction and reddening in a magnitude system.
_____no_output_____First, let's get the filter transmission curves:_____no_output_____
<code>
# Optional, for when the STScI ftp server is not answering:
config.conf.vega_file = 'http://ssb.stsci.edu/cdbs/calspec/alpha_lyr_stis_008.fits'
config.conf.johnson_u_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/johnson_u_004_syn.fits'
config.conf.johnson_b_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/johnson_b_004_syn.fits'
config.conf.johnson_v_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/johnson_v_004_syn.fits'
config.conf.johnson_r_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/johnson_r_003_syn.fits'
config.conf.johnson_i_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/johnson_i_003_syn.fits'
config.conf.bessel_j_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/bessell_j_003_syn.fits'
config.conf.bessel_h_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/bessell_h_004_syn.fits'
config.conf.bessel_k_file = 'http://ssb.stsci.edu/cdbs/comp/nonhst/bessell_k_003_syn.fits'
u_band = SpectralElement.from_filter('johnson_u')
b_band = SpectralElement.from_filter('johnson_b')
v_band = SpectralElement.from_filter('johnson_v')
r_band = SpectralElement.from_filter('johnson_r')
i_band = SpectralElement.from_filter('johnson_i')
j_band = SpectralElement.from_filter('bessel_j')
h_band = SpectralElement.from_filter('bessel_h')
k_band = SpectralElement.from_filter('bessel_k')_____no_output_____
</code>
If you are running this with your own python, see the [synphot documentation](https://synphot.readthedocs.io/en/latest/#installation-and-setup) on how to install your own copy of the necessary files._____no_output_____Next, let's make a background flux to which we will apply extinction. Here we make a 10,000 K blackbody using the model mechanism from within `synphot` and normalize it to $V$ = 10 in the Vega-based magnitude system._____no_output_____
<code>
# First, create a blackbody at some temperature.
sp = SourceSpectrum(BlackBodyNorm1D, temperature=10000)
# sp.plot(left=1, right=15000, flux_unit='flam', title='Blackbody')
# Get the Vega spectrum as the zero point flux.
vega = SourceSpectrum.from_vega()
# vega.plot(left=1, right=15000)
# Normalize the blackbody to some chosen magnitude, say V = 10.
vmag = 10.
v_band = SpectralElement.from_filter('johnson_v')
sp_norm = sp.normalize(vmag * units.VEGAMAG, v_band, vegaspec=vega)
sp_norm.plot(left=1, right=15000, flux_unit='flam', title='Normed Blackbody')_____no_output_____
</code>
Now we initialize the extinction model and choose an extinction of $A_V$ = 2. To get the `dust_extinction` model working with `synphot`, we create a wavelength array and make a spectral element with the extinction model as a lookup table._____no_output_____
<code>
# Initialize the extinction model and choose the extinction, here Av = 2.
ext = CCM89(Rv=3.1)
Av = 2.
# Create a wavelength array.
wav = np.arange(0.1, 3, 0.001)*u.micron
# Make the extinction model in synphot using a lookup table.
ex = ExtinctionCurve(ExtinctionModel1D,
points=wav, lookup_table=ext.extinguish(wav, Av=Av))
sp_ext = sp_norm*ex
sp_ext.plot(left=1, right=15000, flux_unit='flam',
title='Normed Blackbody with Extinction')_____no_output_____
</code>
Synthetic photometry refers to modeling an observation of a star by multiplying the theoretical model for the astronomical flux by a given filter response function, then integrating._____no_output_____
<code>
# "Observe" the star through the filter and integrate to get photometric mag.
sp_obs = Observation(sp_ext, v_band)
sp_obs_before = Observation(sp_norm, v_band)
# sp_obs.plot(left=1, right=15000, flux_unit='flam',
# title='Normed Blackbody with Extinction through V Filter')_____no_output_____
</code>
Next, `synphot` performs the integration and computes magnitudes in the Vega system._____no_output_____
<code>
sp_stim_before = sp_obs_before.effstim(flux_unit='vegamag', vegaspec=vega)
sp_stim = sp_obs.effstim(flux_unit='vegamag', vegaspec=vega)
print('before dust, V =', np.round(sp_stim_before,1))
print('after dust, V =', np.round(sp_stim,1))
# Calculate extinction and compare to our chosen value.
Av_calc = sp_stim - sp_stim_before
print('$A_V$ = ', np.round(Av_calc,1))_____no_output_____
</code>
This is a good check for us to do. We normalized our spectrum to $V$ = 10 mag and added 2 mag of visual extinction, so the synthetic photometry procedure should reproduce these chosen values, and it does. Now we are ready to find the extinction in other passbands. _____no_output_____We calculate the new photometry for the rest of the Johnson optical and the Bessell infrared filters. We calculate extinction $A = \Delta m$ and plot color excess, $E(\lambda - V) = A_\lambda - A_V$.
Notice that `synphot` calculates the effective wavelength of the observations for us, which is very useful for plotting the results. We show reddening with the model extinction curve for comparison in the plot._____no_output_____
<code>
bands = [u_band,b_band,v_band,r_band,i_band,j_band,h_band,k_band]
for band in bands:
# Calculate photometry with dust:
sp_obs = Observation(sp_ext, band, force='extrap')
obs_effstim = sp_obs.effstim(flux_unit='vegamag', vegaspec=vega)
# Calculate photometry without dust:
sp_obs_i = Observation(sp_norm, band, force='extrap')
obs_i_effstim = sp_obs_i.effstim(flux_unit='vegamag', vegaspec=vega)
# Extinction = mag with dust - mag without dust
# Color excess = extinction at lambda - extinction at V
color_excess = obs_effstim - obs_i_effstim - Av_calc
plt.plot(sp_obs_i.effective_wavelength(), color_excess,'or')
print(np.round(sp_obs_i.effective_wavelength(),1), ',',
np.round(color_excess,2))
# Plot the model extinction curve for comparison
plt.plot(wav,Av*ext(wav)-Av,'--k')
plt.ylim([-2,2])
plt.xlabel('$\lambda$ (Angstrom)')
plt.ylabel('E($\lambda$-V)')
plt.title('Reddening of T=10,000K Background Source with Av=2')
plt.show() _____no_output_____
</code>
## Exercise
Try changing the blackbody temperature to something very hot or very cool. Are the color excess values the same? Have the effective wavelengths changed?
Note that the photometric extinction changes because the filter transmission is not uniform. The observed throughput of the filter depends on the shape of the background source flux._____no_output_____
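If you want a starting point, here is a minimal sketch (not part of the original tutorial) that reruns the synthetic-photometry loop with a hotter blackbody; the 30,000 K temperature is just an arbitrary example value, and the sketch reuses the objects defined above (`vega`, `v_band`, `ext`, `wav`, `Av`, `bands`)._____no_output_____
<code>
# Sketch: repeat the calculation for a hotter background source (30000 K is arbitrary).
sp_hot = SourceSpectrum(BlackBodyNorm1D, temperature=30000)
sp_hot_norm = sp_hot.normalize(10. * units.VEGAMAG, v_band, vegaspec=vega)

# Apply the same Av = 2 extinction lookup table as before.
ex_hot = ExtinctionCurve(ExtinctionModel1D,
                         points=wav, lookup_table=ext.extinguish(wav, Av=Av))
sp_hot_ext = sp_hot_norm * ex_hot

# V-band extinction for the new source, then color excess in every band.
Av_hot = (Observation(sp_hot_ext, v_band).effstim(flux_unit='vegamag', vegaspec=vega)
          - Observation(sp_hot_norm, v_band).effstim(flux_unit='vegamag', vegaspec=vega))

for band in bands:
    obs = Observation(sp_hot_ext, band, force='extrap')
    obs_i = Observation(sp_hot_norm, band, force='extrap')
    color_excess = (obs.effstim(flux_unit='vegamag', vegaspec=vega)
                    - obs_i.effstim(flux_unit='vegamag', vegaspec=vega)
                    - Av_hot)
    print(np.round(obs_i.effective_wavelength(), 1), ',', np.round(color_excess, 2))_____no_output_____
</code>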
| {
"repository": "jvictor42/astropy-tutorials",
"path": "tutorials/color-excess/color-excess.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 210,
"size": 30693,
"hexsha": "d08910e6740969d4bfdcf703fb9e0e08e3fce7c1",
"max_line_length": 696,
"avg_line_length": 33.9149171271,
"alphanum_fraction": 0.6122568664
} |
# Notebook from bdmckean/woot_math_analysis
Path: working/EDA_WM-BrianMc-topics-Method2-heat_map-100-clusters-random62.ipynb
<code>
import pymongo
import pandas as pd
import numpy as np
from pymongo import MongoClient
from bson.objectid import ObjectId
import datetime
import matplotlib.pyplot as plt
from collections import defaultdict
%matplotlib inline
import json
plt.style.use('ggplot')
import seaborn as sns
from math import log10, floor
from time import time
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.cluster import KMeans, MiniBatchKMeans_____no_output_____
</code>
# CU Woot Math Method 2 for unsupervised discovery of new behavior traits
## 1) Convert response field dictionary into a document
## 2) Develop word vector using term frequency - inverse document frequency
## 3) Use K-Means to cluster documents
## 4) Map traits to clusters to validate technique
In the first results presented to Woot Math, a 100K sample of the entire data set was chosen. In this report, I'll start with the same type of analysis to develop the same heat map. In the meeting, Sean and Brent suggested using just one qual_id, repeating the experiment, and then looking at the samples in clusters without traits. I'll do that in a subsequent analysis.
_____no_output_____## Part 1. Heat map with 100 K sample of all qual_id's_____no_output_____
<code>
## Connect to local DB
client = MongoClient('localhost', 27017)
print ("Setup db access")Setup db access
#
# Get collections from mongodb
#
#db = client.my_test_db
db = client.test
_____no_output_____chunk = 100000
start = 0
end = start + chunk_____no_output_____#reponses = db.anon_student_task_responses.find({'correct':False})[start:end]
reponses = db.anon_student_task_responses.find()[start:end]_____no_output_____df_responses = pd.DataFrame(list(reponses))_____no_output_____print (df_responses.shape)(100000, 27)
## Make the documents to be analyzed_____no_output_____## Functions for turning dictionary into document
def make_string_from_list(key, elem_list):
    # Append key to each item in list
    ans = ''
    for elem in elem_list:
        ans += key + '_' + elem
    return ans

# Recursively flatten a response dictionary into a space-separated "document"
# of key_value tokens; these documents are fed to the tf-idf vectorizer below.
def make_string(elem, key=None, top=True):
ans = ''
if not elem:
return ans
if top:
top = False
top_keys = []
for idx in range(len(elem.keys())):
top_keys.append(True)
for idx, key in enumerate(elem.keys()):
if top_keys[idx]:
top = True
top_keys[idx] = False
ans += ' '
else:
top = False
#print ('ans = ', ans)
#print (type(elem[key]))
if type(elem[key]) is str or\
type(elem[key]) is int:
#print ('add value', elem[key])
value = str(elem[key])
#ans += key + '_' + value + ' ' + value + ' '
ans += key + '_' + value + ' '
elif type(elem[key]) is list:
#print ('add list', elem[key])
temp_elem = dict()
for item in elem[key]:
temp_elem[key] = item
ans += make_string(temp_elem, top)
elif type(elem[key]) is dict:
#print ('add dict', elem[key])
for item_key in elem[key].keys():
temp_elem = dict()
temp_elem[item_key] = elem[key][item_key]
ans += key + '_' + make_string(temp_elem, top)
elif type(elem[key]) is float:
#print ('add dict', elem[key])
sig = 2
value = elem[key]
value = round(value, sig-int(
floor(log10(abs(value))))-1)
value = str(value)
#ans += key + '_' + value + ' ' + value + ' '
ans += key + '_' + value + ' '
# ans += ' ' + key + ' '
#print ('not handled', elem[key])
return ans_____no_output_____# Makes the cut & paste below easier
df3 = df_responses_____no_output_____df3['response_doc'] = df3['response'].map(make_string)
_____no_output_____df3['response_doc'] = df3['response_doc'].map(lambda x: x + ' ')
df3['response_doc'] = df3['response_doc'].map(lambda x: x.replace('/','_'))
df3['response_doc'] = df3['response_doc'] + ' ' + df3['txt']
df3['response_doc'] = df3['response_doc'].map(lambda x: x + ' ')
df3['response_doc'] = df3['response_doc'].map(lambda x: x.replace("\n", ""))
df3['response_doc'] = df3['response_doc'].map(lambda x: x.replace("?", " "))_____no_output_____
</code>
## Sample Documents_____no_output_____
<code>
for idx in range(20):
print ("Sample number:", idx, "\n", df3.iloc[idx]['response_doc'])Sample number: 0
fraction_cblock_chains_ right_442 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_2 sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_2 fraction_cblock_chains_ left_97 fraction_cblock_chains_ lcm_sum_ numerator_1 lcm_sum_ denominator_2 lcm_sum_ __as3_type_Fraction plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_end_marker_noline.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_dog.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_cat_dog_trail.swf den_2 fraction_input_value_1_2 num_1 fraction_cblock_total_count_1 fraction_cblock_counts_ 1_2_1 whole_ Use the 1/2 pieces to figure out how far the dog traveled.Answer: 1/2
Sample number: 1
fraction_cblock_total_count_4 plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_panda.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf input_4 fraction_cblock_chains_ right_856 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 pieces_1_4 pieces_1_4 pieces_1_4 fraction_cblock_chains_ left_165 fraction_cblock_chains_ lcm_sum_ numerator_4 lcm_sum_ denominator_4 lcm_sum_ __as3_type_Fraction numberline_associations_ numberline_associations_ position_720.0 numberline_associations_ pos_value_1.0 numberline_associations_ obj_name_object fraction_cblock_counts_ 1_4_4 Drag the panda to 4/4 of a yard from the start.Answer: 4/4
Sample number: 2
fraction_cblock_chains_ left_176 fraction_cblock_chains_ lcm_sum_ numerator_2 lcm_sum_ denominator_8 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_348 fraction_cblock_chains_ pieces_1_8 pieces_1_8 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_4 sum_ __as3_type_Fraction fraction_cblock_chains_ left_590 fraction_cblock_chains_ lcm_sum_ numerator_1 lcm_sum_ denominator_6 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_705 fraction_cblock_chains_ pieces_1_6 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_6 sum_ __as3_type_Fraction fraction_cblock_chains_ left_176 fraction_cblock_chains_ lcm_sum_ numerator_1 lcm_sum_ denominator_4 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_348 fraction_cblock_chains_ pieces_1_4 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_4 sum_ __as3_type_Fraction fraction_cblock_chains_ left_176 fraction_cblock_chains_ lcm_sum_ numerator_1 lcm_sum_ denominator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_866 fraction_cblock_chains_ pieces_1 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_1 sum_ __as3_type_Fraction fraction_cblock_total_count_5 fraction_cblock_counts_ 1_1 fraction_cblock_counts_ 1_8_2 fraction_cblock_counts_ 1_6_1 fraction_cblock_counts_ 1_4_1 fraction_cblock_containment_ piece0_ lcm_sum_ numerator_2 lcm_sum_ denominator_8 lcm_sum_ __as3_type_Fraction piece0_ piece0_ pieces_1_8 pieces_1_8 piece0_ sum_ numerator_1 sum_ denominator_4 sum_ __as3_type_Fraction Model how many eighths are equal to one fourth.Answer: 2
Sample number: 3
fraction_cblock_chains_ left_176 fraction_cblock_chains_ lcm_sum_ numerator_1 lcm_sum_ denominator_2 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_521 fraction_cblock_chains_ pieces_1_2 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_2 sum_ __as3_type_Fraction fraction_cblock_chains_ left_176 fraction_cblock_chains_ lcm_sum_ numerator_4 lcm_sum_ denominator_8 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_521 fraction_cblock_chains_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_2 sum_ __as3_type_Fraction fraction_cblock_chains_ left_176 fraction_cblock_chains_ lcm_sum_ numerator_1 lcm_sum_ denominator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ right_866 fraction_cblock_chains_ pieces_1 fraction_cblock_chains_ sum_ numerator_1 sum_ denominator_1 sum_ __as3_type_Fraction fraction_cblock_total_count_6 fraction_cblock_counts_ 1_1 fraction_cblock_counts_ 1_2_1 fraction_cblock_counts_ 1_8_4 fraction_cblock_containment_ [Fraction] 1_2_ lcm_sum_ numerator_4 lcm_sum_ denominator_8 lcm_sum_ __as3_type_Fraction [Fraction] 1_2_ [Fraction] 1_2_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 [Fraction] 1_2_ sum_ numerator_1 sum_ denominator_2 sum_ __as3_type_Fraction Model how many halves are equal to four eighths.Answer: 1
Sample number: 4
fraction_circle_containment_ [Fraction] 1_2_ lcm_sum_ numerator_4 lcm_sum_ denominator_8 lcm_sum_ __as3_type_Fraction [Fraction] 1_2_ [Fraction] 1_2_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 [Fraction] 1_2_ sum_ numerator_1 sum_ denominator_2 sum_ __as3_type_Fraction fraction_circle_total_count_6 fraction_circle_groups_ x_512 fraction_circle_groups_ y_300 fraction_circle_groups_ scale_1.0 fraction_circle_groups_ pieces_1_2 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1 fraction_circle_groups_ chains_ right_180 chains_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 chains_ left_0 fraction_circle_counts_ 1_1 fraction_circle_counts_ 1_2_1 fraction_circle_counts_ 1_8_4 Cameron ate 4/8 of a pizza.Cover the pizza to model how many halves of a pizza he ate.Answer: 1
Sample number: 5
image_object_groups_ total_6 image_object_groups_ on_3 image_object_groups_ url_assets_objects_singles_watch.swf image_object_groups_ off_3 Shade 1/2 of the 6 watches.Answer: 1/2
Sample number: 6
Shade 1/4 of the circle.answer={:n=>3, :d=>12}
Sample number: 7
Shade 1/3 of the rectangle.answer={:n=>2, :d=>6}
Sample number: 8
fraction_circle_groups_ x_512 fraction_circle_groups_ scale_1 fraction_circle_groups_ chains_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 chains_ left_0 chains_ right_180 fraction_circle_groups_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_2 pieces_1 fraction_circle_groups_ y_300 fraction_circle_containment_ piece_0_ sum_ denominator_2 sum_ numerator_1 sum_ __as3_type_Fraction piece_0_ piece_0_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 piece_0_ lcm_sum_ denominator_8 lcm_sum_ numerator_4 lcm_sum_ __as3_type_Fraction fraction_circle_counts_ 1_1 fraction_circle_counts_ 1_2_1 fraction_circle_counts_ 1_8_4 fraction_circle_total_count_6 Drag one eighth pieces to cover all of the 1/2 piece.Answer: 4
Sample number: 9
fraction_circle_groups_ x_512 fraction_circle_groups_ scale_1.0 fraction_circle_groups_ chains_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 chains_ left_0 chains_ right_180 fraction_circle_groups_ pieces_1_2 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1 fraction_circle_groups_ y_300 fraction_circle_containment_ [Fraction] 1_2_ sum_ denominator_2 sum_ numerator_1 sum_ __as3_type_Fraction [Fraction] 1_2_ [Fraction] 1_2_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 [Fraction] 1_2_ lcm_sum_ denominator_8 lcm_sum_ numerator_4 lcm_sum_ __as3_type_Fraction fraction_circle_counts_ 1_1 fraction_circle_counts_ 1_2_1 fraction_circle_counts_ 1_8_4 fraction_circle_total_count_6 Drag one half pieces to cover all of the 4/8 shown.Answer: 1
Sample number: 10
fraction_circle_groups_ x_512 fraction_circle_groups_ scale_1.0 fraction_circle_groups_ chains_ pieces_1_4 pieces_1_4 chains_ left_0 chains_ right_180 fraction_circle_groups_ pieces_1_2 pieces_1_4 pieces_1_4 pieces_1 fraction_circle_groups_ y_300 fraction_circle_containment_ [Fraction] 1_2_ sum_ denominator_2 sum_ numerator_1 sum_ __as3_type_Fraction [Fraction] 1_2_ [Fraction] 1_2_ pieces_1_4 pieces_1_4 [Fraction] 1_2_ lcm_sum_ denominator_4 lcm_sum_ numerator_2 lcm_sum_ __as3_type_Fraction fraction_circle_counts_ 1_1 fraction_circle_counts_ 1_2_1 fraction_circle_counts_ 1_4_2 fraction_circle_total_count_4 Drag one half pieces to cover all of the 2/4 shown.Answer: 1
Sample number: 11
radio_choice_C radio_group_problem_ choice_C radio_group_problem_ text_3_6 radio_text_3_6 What fraction has 6 as the denominator () 6/7 () 4/5 () 3/6Answer: 3/6
Sample number: 12
fraction_cblock_chains_ sum_ denominator_10 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_10 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_10 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1458 fraction_cblock_chains_ sum_ denominator_5 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_10 lcm_sum_ numerator_2 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_10 pieces_1_10 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1297 fraction_cblock_chains_ sum_ denominator_10 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_10 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_10 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1531 fraction_cblock_chains_ sum_ denominator_10 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_10 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_10 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1214 fraction_cblock_chains_ sum_ denominator_10 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_10 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_10 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1424 fraction_cblock_chains_ sum_ denominator_5 sum_ numerator_2 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_10 lcm_sum_ numerator_4 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_10 pieces_1_10 pieces_1_10 pieces_1_10 fraction_cblock_chains_ left_544 fraction_cblock_chains_ right_820 fraction_cblock_chains_ sum_ denominator_84 sum_ numerator_73 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_84 lcm_sum_ numerator_73 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_7 pieces_1_7 pieces_1_6 pieces_1_6 pieces_1_4 fraction_cblock_chains_ left_1001 fraction_cblock_chains_ right_1272 fraction_cblock_chains_ sum_ denominator_35 sum_ numerator_17 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_35 lcm_sum_ numerator_17 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_7 pieces_1_7 pieces_1_5 fraction_cblock_chains_ left_981 fraction_cblock_chains_ right_1316 fraction_cblock_chains_ sum_ denominator_28 sum_ numerator_11 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_28 lcm_sum_ numerator_11 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_7 pieces_1_4 fraction_cblock_chains_ left_1001 fraction_cblock_chains_ right_1272 fraction_cblock_chains_ sum_ denominator_7 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_7 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_7 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1300 fraction_cblock_chains_ sum_ denominator_7 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_7 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_7 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1248 fraction_cblock_chains_ sum_ denominator_6 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_6 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_6 fraction_cblock_chains_ left_1024 
fraction_cblock_chains_ right_1316 fraction_cblock_chains_ sum_ denominator_6 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_6 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_6 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1387 fraction_cblock_chains_ sum_ denominator_6 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_6 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_6 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1220 fraction_cblock_chains_ sum_ denominator_6 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_6 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_6 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1387 fraction_cblock_chains_ sum_ denominator_5 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_5 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_5 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1358 fraction_cblock_chains_ sum_ denominator_5 sum_ numerator_3 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_5 lcm_sum_ numerator_3 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_5 pieces_1_5 pieces_1_5 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1337 fraction_cblock_chains_ sum_ denominator_2 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_2 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 pieces_1_4 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1523 fraction_cblock_chains_ sum_ denominator_4 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1272 fraction_cblock_chains_ sum_ denominator_4 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1358 fraction_cblock_chains_ sum_ denominator_2 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_2 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_2 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1531 fraction_cblock_chains_ sum_ denominator_2 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_2 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 pieces_1_4 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1389 fraction_cblock_chains_ sum_ denominator_4 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1216 fraction_cblock_chains_ sum_ denominator_4 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_1351 fraction_cblock_chains_ sum_ denominator_1 
sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_1 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1 fraction_cblock_chains_ left_1024 fraction_cblock_chains_ right_2045 fraction_cblock_chains_ sum_ denominator_1 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_1 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1 fraction_cblock_chains_ left_130 fraction_cblock_chains_ right_820 fraction_cblock_containment_ bar1_ sum_ denominator_5 sum_ numerator_2 sum_ __as3_type_Fraction bar1_ bar1_ pieces_1_10 pieces_1_10 pieces_1_10 pieces_1_10 bar1_ lcm_sum_ denominator_10 lcm_sum_ numerator_4 lcm_sum_ __as3_type_Fraction fraction_cblock_containment_ [Fraction] 1_4_ sum_ denominator_5 sum_ numerator_1 sum_ __as3_type_Fraction [Fraction] 1_4_ [Fraction] 1_4_ pieces_1_5 [Fraction] 1_4_ lcm_sum_ denominator_5 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_containment_ [Fraction] 1_ sum_ denominator_10 sum_ numerator_1 sum_ __as3_type_Fraction [Fraction] 1_ [Fraction] 1_ pieces_1_10 [Fraction] 1_ lcm_sum_ denominator_10 lcm_sum_ numerator_1 lcm_sum_ __as3_type_Fraction fraction_cblock_total_count_41 fraction_cblock_counts_ 1_2 fraction_cblock_counts_ 1_7_7 fraction_cblock_counts_ 1_4_10 fraction_cblock_counts_ 1_6_6 fraction_cblock_counts_ 1_5_5 fraction_cblock_counts_ 1_2_1 fraction_cblock_counts_ 1_10_10 Model 4/10 on the black bar using the fraction pieces below.Answer: [object Object]
Sample number: 13
whole_ fraction_input_value_4_6 fraction_cblock_chains_ sum_ denominator_3 sum_ numerator_2 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_6 lcm_sum_ numerator_4 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_6 pieces_1_6 pieces_1_6 pieces_1_6 fraction_cblock_chains_ left_96 fraction_cblock_chains_ right_522 num_4 plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_end_marker.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_beetle.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_beetle_trail.swf den_6 fraction_cblock_total_count_4 fraction_cblock_counts_ 1_6_4 Use the 1/6 pieces to figure out how far the beetle traveled.Answer: 4/6
Sample number: 14
plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_panda.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf input_8 numberline_associations_ position_634 numberline_associations_ pos_value_0.88 numberline_associations_ obj_name_object numberline_associations_ fraction_cblock_chains_ sum_ denominator_8 sum_ numerator_7 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_8 lcm_sum_ numerator_7 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 pieces_1_8 fraction_cblock_chains_ left_165 fraction_cblock_chains_ right_769 fraction_cblock_total_count_7 fraction_cblock_counts_ 1_8_7 Drag the panda to 7/8 of a yard from the start.Answer: 7/8
Sample number: 15
input_8 One yard on the number line is divided intoAnswer: sixths
Sample number: 16
numberline_associations_ position_580.0 numberline_associations_ pos_value_1.0 numberline_associations_ obj_name_answer_text numberline_associations_ obj_value_3_3 input_ Drag the fraction to its correct location on the number line.Answer: 3/3
Sample number: 17
plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_shark.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf input_6 numberline_associations_ position_722 numberline_associations_ pos_value_1.0 numberline_associations_ obj_name_object numberline_associations_ fraction_cblock_chains_ sum_ denominator_1 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_6 lcm_sum_ numerator_6 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_6 pieces_1_6 pieces_1_6 pieces_1_6 pieces_1_6 pieces_1_6 fraction_cblock_chains_ left_165 fraction_cblock_chains_ right_856 fraction_cblock_total_count_6 fraction_cblock_counts_ 1_6_6 Drag the shark to 1/6 of a yard from the start.Answer: 1/6
Sample number: 18
whole_ fraction_input_value_1_3 fraction_cblock_chains_ sum_ denominator_1 sum_ numerator_1 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_3 lcm_sum_ numerator_3 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_3 pieces_1_3 pieces_1_3 fraction_cblock_chains_ left_96 fraction_cblock_chains_ right_657 num_1 plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_end_marker.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_snail.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_snail_trail.swf den_3 fraction_cblock_total_count_3 fraction_cblock_counts_ 1_3_3 Use the 1/3 pieces to figure out how far the snail traveled.Answer: 3/3
Sample number: 19
whole_ fraction_input_value_3_4 fraction_cblock_chains_ sum_ denominator_4 sum_ numerator_3 sum_ __as3_type_Fraction fraction_cblock_chains_ lcm_sum_ denominator_4 lcm_sum_ numerator_3 lcm_sum_ __as3_type_Fraction fraction_cblock_chains_ pieces_1_4 pieces_1_4 pieces_1_4 fraction_cblock_chains_ left_96 fraction_cblock_chains_ right_545 num_3 plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_end_marker_noline.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_markers_start_marker.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_dog.swf plain_image_groups_ total_1 plain_image_groups_ url_assets_cms_wootmath_fractions_number_line_objects_cat_dog_trail.swf den_4 fraction_cblock_total_count_3 fraction_cblock_counts_ 1_4_3 Use the 1/4 pieces to figure out how far the dog traveled.Answer: 3/4
data_samples = df3['response_doc']_____no_output_____n_features = 1000
n_samples = len(data_samples)
n_topics = 50
n_top_words = 20_____no_output_____print("Extracting tf-idf features ...")
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2,
max_features=n_features,
stop_words='english')
t0 = time()
tfidf = tfidf_vectorizer.fit_transform(data_samples)
print("done in %0.3fs." % (time() - t0))Extracting tf-idf features ...
done in 8.222s.
# Number of clusters
true_k = 100
km = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1,
init_size=1000, batch_size=1000, random_state=62)_____no_output_____print("Clustering with %s" % km)
t0 = time()
km.fit(tfidf)
print("done in %0.3fs" % (time() - t0))
print()Clustering with MiniBatchKMeans(batch_size=1000, compute_labels=True, init='k-means++',
init_size=1000, max_iter=100, max_no_improvement=10,
n_clusters=100, n_init=1, random_state=62, reassignment_ratio=0.01,
tol=0.0, verbose=0)
done in 2.820s
print("Top terms per cluster:")
order_centroids = km.cluster_centers_.argsort()[:, ::-1]
terms = tfidf_vectorizer.get_feature_names()
for i in range(true_k):
print("Cluster %d:\n" % i, end='')
for ind in order_centroids[i, :30]:
print(' --- %s\n' % terms[ind], end='')
print()Top terms per cluster:
Cluster 0:
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- fraction_input_value_
--- long
--- input_a_2
--- enter
--- fraction
--- bar
--- answer
--- sum
--- numbers
--- input_6
--- input_a_6
--- form
--- simplest
--- input_a_4
--- 12
--- input_a_3
--- input_a_8
--- input_b_12
--- input_b_6
--- equation_12
--- equation_6
--- 10
--- input_b_8
--- input_5
--- input_b_4
--- shaded
--- input_a_5
--- match
Cluster 1:
--- fraction_cblock_chains_
--- sum_
--- lcm_sum_
--- pieces_1_9
--- denominator_9
--- __as3_type_fraction
--- numerator_1
--- denominator_1
--- fraction_cblock_counts_
--- bar1_
--- denominator_3
--- numerator_2
--- pieces_1_3
--- fraction
--- left_130
--- fraction_cblock_containment_
--- pieces_1
--- left_90
--- left_80
--- bar2_
--- bar
--- left_200
--- black
--- numerator_4
--- right_820
--- 1_
--- 1_3_
--- 1_1
--- numerator_3
--- right_780
Cluster 2:
--- pieces_1_12
--- fraction_circle_groups_
--- chains_
--- fraction_circle_counts_
--- frac_piece_
--- sum_
--- lcm_sum_
--- scale_1
--- denominator_12
--- __as3_type_fraction
--- fraction
--- fraction_circle_containment_
--- 1_4_
--- reds
--- piece1_
--- 1_6_
--- numerator_1
--- x_300
--- pieces_1_4
--- 1_12_3
--- piece
--- circle1_
--- cover
--- y_350
--- left_270
--- 1_12_2
--- y_300
--- 12
--- piece_0_
--- scale_0
Cluster 3:
--- plain_image_groups_
--- total_1
--- swf
--- answer
--- url_assets_cms_wootmath_fractions_number_line_markers_start_marker
--- length
--- input_a_0
--- correct
--- choose
--- enter
--- comparison
--- start
--- whole_
--- distance
--- traveled
--- shape
--- drag
--- url_assets_cms_wootmath_fractions_number_line_mug_mug_half_01
--- far
--- input_
--- url_assets_cms_wootmath_fractions_number_line_markers_end_marker
--- input_a_
--- label
--- url_assets_cms_wootmath_fractions_number_line_objects_v2_bug_trail
--- robots
--- url_assets_cms_wootmath_fractions_number_line_objects_v2_bubble_trail
--- swam
--- box
--- fish
--- url_assets_cms_wootmath_fractions_number_line_juice_oj_tupperware_fourths_02
Cluster 4:
--- numberline_associations_
--- line
--- location
--- number
--- correct
--- drag
--- label
--- pos_value_1
--- pos_value_0
--- obj_value_
--- obj_value_a
--- yard
--- obj_name_answer_text
--- mile
--- obj_name_eqn
--- biked
--- answer
--- input_
--- ran
--- fraction_cblock_chains_
--- miles
--- input_8
--- obj_value_1
--- input_12
--- obj_name_a_text
--- pos_value_2
--- obj_value_0
--- labels
--- total
--- input_9
Cluster 5:
--- object
--- decimals
--- shown
--- input_
--- input_a_
--- choose
--- comparison
--- correct
--- model
--- answer
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_circle_total_count_12
--- fraction_circle_total_count_3
--- fraction_circle_total_count_4
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
--- youranswer
--- fraction_circle_total_count_5
--- fraction_circle_total_count_10
--- fraction_circle_total_count_1
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
--- fraction_cblock_total_count_9
--- fraction_cblock_total_count_8
--- fraction_cblock_total_count_7
--- fraction_cblock_total_count_6
--- fraction_cblock_total_count_5
Cluster 6:
--- image_object_groups_
--- shade
--- swf
--- 14
--- 11
--- off_2
--- 13
--- total_14
--- answer
--- total_8
--- off_4
--- off_1
--- on_3
--- on_4
--- off_3
--- on_0
--- on_2
--- off_6
--- 12
--- total_6
--- url_assets_objects_singles_octopus
--- total_12
--- 10
--- on_1
--- url_assets_objects_singles_cat
--- 15
--- cats
--- url_assets_objects_singles_piranha
--- off_5
--- total_9
Cluster 7:
--- 43
--- order
--- arrange
--- greatest
--- boxes
--- fractions
--- 63
--- drag
--- 81
--- fraction
--- 73
--- 42
--- 52
--- 18
--- 123
--- 16
--- 14
--- 54
--- 12
--- 51
--- 25
--- 83
--- 58
--- 10
--- fraction_circle_total_count_7
--- fraction_circle_total_count_5
--- fraction_circle_total_count_6
--- youranswer
--- fraction_circle_total_count_8
--- fraction_circle_total_count_9
Cluster 8:
--- grid
--- 10
--- model
--- answer
--- 100
--- popcorn
--- den_10
--- num_7
--- whole_
--- boxes
--- fraction
--- num_9
--- wearing
--- cats
--- num_5
--- think
--- represented
--- input_0
--- bigger
--- input_a_0
--- greater
--- decimal
--- pieces
--- came
--- 1_10
--- cut
--- 1_8
--- radio_group_problem_
--- enter
--- 11
Cluster 9:
--- write
--- used
--- divideboth
--- denominator
--- numerator
--- form
--- simplest
--- number
--- enter
--- answer
--- divide
--- 12
--- 15
--- 10
--- fraction_cblock_total_count_6
--- fraction_circle_total_count_5
--- fraction_cblock_total_count_2
--- fraction_circle_total_count_4
--- fraction_circle_total_count_3
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
--- fraction_circle_total_count_12
--- fraction_circle_total_count_11
--- fraction_cblock_total_count_3
--- fraction_circle_total_count_10
--- fraction_circle_total_count_1
--- fraction_circle_groups_
Cluster 10:
--- fraction_cblock_chains_
--- lcm_sum_
--- sum_
--- __as3_type_fraction
--- numerator_1
--- denominator_1
--- fraction_cblock_counts_
--- denominator_3
--- pieces_1_5
--- denominator_5
--- pieces_1_3
--- denominator_7
--- denominator_2
--- numerator_2
--- left_90
--- left_80
--- pieces_1_4
--- denominator_4
--- pieces_1
--- fraction
--- bar2_
--- fraction_cblock_containment_
--- unit2_
--- pieces_1_7
--- pieces_1_6
--- right_780
--- bar1_
--- 1_
--- numerator_3
--- pieces_1_2
Cluster 11:
--- pieces_1_10
--- fraction_circle_groups_
--- chains_
--- lcm_sum_
--- sum_
--- fraction_circle_counts_
--- denominator_10
--- fraction
--- 1_2_
--- __as3_type_fraction
--- scale_1
--- unit_
--- fraction_circle_containment_
--- circle1_
--- 10
--- purples
--- 1_5_
--- 1_
--- x_300
--- scale_0
--- y_300
--- numerator_1
--- pieces_1
--- denominator_5
--- pieces_1_8
--- right_270
--- 1_10_5
--- pieces_1_5
--- unit2_
--- unit1_
Cluster 12:
--- fraction_circle_groups_
--- unit_
--- pieces_1_6
--- fraction_circle_counts_
--- lcm_sum_
--- sum_
--- chains_
--- scale_0
--- __as3_type_fraction
--- x_512
--- fraction_circle_containment_
--- numerator_1
--- y_350
--- 1_1
--- pieces_1_2
--- bigger
--- fraction_circle_total_count_2
--- denominator_6
--- pieces_1
--- denominator_2
--- pieces_1_3
--- 125
--- model
--- scale_1
--- answer
--- 1_2_1
--- left
--- fraction
--- 1_6_3
--- cake
Cluster 13:
--- missing
--- numerator
--- enter
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- whole_
--- answer
--- object
--- input_1
--- mult_n_1_
--- mult_d_1_
--- eqn_2
--- input_a_1
--- den_12
--- 12
--- input_2
--- input_a_2
--- 15
--- den_15
--- eqn_1_2
--- input_3
--- input_a_3
--- den_10
--- 2_3
--- num_7
--- mult_n_2_
--- mult_d_2_
--- den_4
--- num_4
--- den_6
Cluster 14:
--- pieces_1_8
--- fraction_cblock_chains_
--- sum_
--- lcm_sum_
--- __as3_type_fraction
--- denominator_8
--- bar1_
--- numerator_1
--- denominator_1
--- fraction_cblock_counts_
--- left_130
--- right_820
--- bar
--- numerator_3
--- fraction_cblock_containment_
--- plain_image_groups_
--- numerator_8
--- numerator_7
--- pieces_1
--- numerator_5
--- black
--- 1_1
--- 1_8_8
--- object
--- denominator_4
--- fraction
--- eighth
--- denominator_2
--- gray
--- numerator_6
Cluster 15:
--- pieces_1_3
--- fraction_circle_groups_
--- chains_
--- sum_
--- lcm_sum_
--- 1_
--- denominator_3
--- fraction_circle_counts_
--- __as3_type_fraction
--- unit1_
--- unit2_
--- fraction
--- fraction_circle_containment_
--- scale_0
--- circle1_
--- browns
--- 1_3_3
--- pieces_1
--- scale_1
--- circle
--- numerator_1
--- y_300
--- fraction_circle_total_count_4
--- black
--- x_300
--- input_3
--- input_a_3
--- numerator_3
--- 1_1
--- numerator_2
Cluster 16:
--- 81
--- order
--- arrange
--- greatest
--- boxes
--- fractions
--- drag
--- fraction
--- 12
--- 123
--- 41
--- 83
--- 63
--- 54
--- 42
--- 11
--- 125
--- 15
--- 33
--- 32
--- 14
--- 58
--- decimals
--- 10
--- 09
--- input_
--- answer
--- enter
--- fraction_input_value_2
--- fraction_input_value_1_8
Cluster 17:
--- fraction_cblock_chains_
--- lcm_sum_
--- sum_
--- __as3_type_fraction
--- numerator_1
--- denominator_8
--- denominator_12
--- pieces_1_12
--- denominator_1
--- pieces_1_8
--- left_175
--- denominator_4
--- fraction_cblock_counts_
--- pieces_1_6
--- pieces_1_4
--- right_865
--- pieces_1
--- fraction
--- left_90
--- unit3_
--- denominator_6
--- right_347
--- denominator_3
--- unit2_
--- left_347
--- denominator_2
--- numerator_2
--- numerator_3
--- fraction_cblock_containment_
--- right_780
Cluster 18:
--- pieces_1_9
--- fraction_circle_groups_
--- chains_
--- sum_
--- lcm_sum_
--- denominator_9
--- __as3_type_fraction
--- 1_3_
--- fraction_circle_counts_
--- fraction
--- fraction_cblock_chains_
--- 1_
--- numerator_1
--- fraction_circle_containment_
--- scale_1
--- whites
--- numerator_9
--- unit1_
--- bar1_
--- 1_9_9
--- denominator_1
--- pieces_1
--- unit2_
--- y_300
--- x_300
--- 1_9_3
--- 1_1
--- unit_
--- den_9
--- input_a_9
Cluster 19:
--- numberline_associations_
--- line
--- number
--- mile
--- divide
--- location
--- lengths
--- label
--- correct
--- drag
--- pos_value_0
--- equal
--- den_input_4
--- den_input_6
--- answer
--- parts
--- den_input_8
--- den_input_3
--- pos_value_1
--- fraction
--- position_380
--- 25
--- 67
--- position_490
--- 75
--- position_200
--- position_550
--- 33
--- plain_image_groups_
--- position_260
Cluster 20:
--- rectangle
--- fraction_input_value_
--- fraction
--- shade
--- shaded
--- match
--- 2_6
--- circle
--- answer
--- 4_8
--- equivalent
--- input_a_3
--- 2_8
--- 12
--- object
--- input_a_2
--- 24
--- problem
--- input_a_6
--- 13
--- radio_choice_b
--- 16
--- 4_6
--- input_8
--- 3_8
--- input_a_4
--- 10
--- bar
--- 15
--- 3_6
Cluster 21:
--- object
--- number
--- form
--- simplest
--- mixed
--- enter
--- answer
--- whole_1
--- fraction_input_value_1
--- did
--- line
--- shown
--- youranswer
--- miles
--- num_1
--- whole_2
--- fraction_input_value_2
--- den_4
--- long
--- divided
--- den_5
--- swim
--- far
--- bike
--- num_3
--- fraction
--- den_6
--- bar
--- num_2
--- ate
Cluster 22:
--- match
--- fraction_input_value_
--- shade
--- fraction
--- input_a_
--- choose
--- comparison
--- correct
--- circle
--- bar
--- 1_3
--- flower
--- polygon
--- star
--- 3_4
--- 1_6
--- 2_4
--- 4_8
--- 4_6
--- 3_6
--- 3_8
--- 2_5
--- 3_5
--- 2_8
--- 6_8
--- 1_8
--- 2_6
--- 2_3
--- 5_6
--- 1_5
Cluster 23:
--- length
--- yard
--- yards
--- divide
--- line
--- lengths
--- number
--- fraction_input_value_
--- bar
--- equal
--- enter
--- whole_
--- fraction
--- den_input_8
--- num_1
--- divided
--- fifths
--- parts
--- den_input_6
--- den_input_4
--- input_7
--- answer
--- den_8
--- den_input_3
--- den_2
--- num_2
--- den_6
--- den_4
--- den_3
--- num_3
Cluster 24:
--- box
--- drag
--- answer
--- equivalent
--- shown
--- homework
--- person
--- fraction
--- piece
--- shows
--- 12
--- far
--- traveled
--- half
--- yellow
--- pieces
--- bar
--- unit
--- brown
--- 24
--- sum
--- blue
--- decimal
--- dark
--- 51
--- 25
--- greater
--- 22
--- circle
--- black
Cluster 25:
--- fraction_circle_groups_
--- 1_2_
--- pieces_1_2
--- fraction
--- lcm_sum_
--- sum_
--- chains_
--- denominator_2
--- pieces_1_4
--- fraction_circle_counts_
--- __as3_type_fraction
--- 1_
--- fraction_circle_containment_
--- numerator_1
--- scale_1
--- y_300
--- unit1_
--- pieces_1
--- 1_2_2
--- numerator_2
--- unit2_
--- fraction_circle_total_count_3
--- 1_1
--- yellows
--- input_2
--- scale_0
--- circle
--- x_512
--- input_a_2
--- x_300
Cluster 26:
--- fraction_cblock_chains_
--- pieces_1_6
--- lcm_sum_
--- sum_
--- __as3_type_fraction
--- denominator_6
--- numerator_1
--- denominator_1
--- bar1_
--- fraction_cblock_counts_
--- left_130
--- numerator_5
--- denominator_3
--- fraction_cblock_containment_
--- numerator_2
--- right_820
--- pieces_1
--- bar
--- numerator_6
--- unit2_
--- fraction
--- denominator_2
--- left_90
--- unit1_
--- object
--- numerator_4
--- 1_1
--- left_176
--- black
--- 1_6_6
Cluster 27:
--- pizza
--- fraction_circle_groups_
--- ate
--- eat
--- friend
--- x_475
--- y_384
--- did
--- fraction_circle_total_count_1
--- fraction_circle_counts_
--- scale_1
--- 1_1
--- pieces_1
--- greater
--- pieces_1_8
--- 10
--- half
--- answer
--- 12
--- unit_
--- lcm_sum_
--- sum_
--- scale_0
--- chains_
--- denominator_8
--- fraction_circle_total_count_2
--- __as3_type_fraction
--- y_470
--- unit1_
--- leftover
Cluster 28:
--- radio_text_
--- choose
--- comparison
--- input_
--- correct
--- answer
--- input_a_
--- fraction_cblock_total_count_3
--- fraction_cblock_total_count_4
--- fraction_circle_total_count_4
--- fraction_circle_total_count_3
--- fraction_circle_total_count_2
--- fraction_cblock_total_count_17
--- fraction_circle_total_count_16
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_cblock_total_count_18
--- fraction_circle_total_count_13
--- fraction_circle_total_count_12
--- fraction_circle_total_count_11
--- fraction_circle_total_count_10
--- fraction_circle_total_count_1
--- fraction_circle_total_count_5
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
--- fraction_cblock_total_count_9
--- fraction_cblock_total_count_8
--- fraction_cblock_total_count_2
--- fraction_cblock_total_count_7
Cluster 29:
--- different
--- numbers
--- 24
--- make
--- true
--- sentence
--- using
--- number
--- enter
--- answer
--- 12
--- comparison1
--- fractions
--- input_
--- input_a_
--- input_2
--- input_a_2
--- correct
--- fraction_circle_total_count_13
--- fraction_circle_total_count_16
--- fraction_circle_total_count_14
--- fraction_circle_total_count_15
--- fraction_circle_total_count_2
--- fraction_circle_total_count_3
--- fraction_circle_total_count_4
--- fraction_circle_total_count_12
--- fraction_input_value_3_6
--- fraction_circle_total_count_11
--- fraction_circle_total_count_1
--- fraction_circle_groups_
Cluster 30:
--- math
--- sentence
--- complete
--- correct
--- drag
--- tenths
--- answer
--- people
--- equally
--- amounts
--- object
--- undefined
--- express
--- cookies
--- 10
--- pizzas
--- numbers
--- boxes
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- input_a_6
--- input_6
--- 13
--- makes
--- true
--- cake
--- wants
--- friend
--- box
--- 11
Cluster 31:
--- fraction_circle_groups_
--- y_350
--- scale_1
--- object
--- fraction_circle_counts_
--- say
--- fraction_circle_total_count_2
--- x_750
--- x_250
--- cover
--- piece
--- dark
--- orange
--- blue
--- 1_5_1
--- brown
--- pink
--- pieces_1_5
--- 1_4_1
--- yellow
--- 1_3_1
--- red
--- pieces_1_4
--- pieces_1_3
--- reds
--- 1_2_1
--- pieces_1_2
--- answer
--- green
--- greens
Cluster 32:
--- numberline_associations_
--- plain_image_groups_
--- total_1
--- swf
--- obj_name_object
--- pos_value_0
--- mile
--- drag
--- start
--- url_assets_cms_wootmath_fractions_number_line_markers_start_marker
--- meter
--- answer
--- beetle
--- input_12
--- shark
--- fraction_cblock_chains_
--- 10
--- final
--- input_4
--- obj_name_obj
--- location
--- input_5
--- tenths
--- yard
--- input_3
--- url_assets_cms_wootmath_fractions_number_line_objects_v2_elephant
--- elephant
--- walked
--- panda
--- position_260
Cluster 33:
--- 2_3
--- match
--- shade
--- fraction_input_value_
--- fraction
--- input_a_
--- choose
--- comparison
--- correct
--- circle
--- bar
--- fraction_cblock_total_count_6
--- fraction_circle_total_count_12
--- fraction_circle_total_count_3
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_cblock_total_count_18
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
--- fraction_cblock_total_count_2
--- fraction_cblock_total_count_3
--- fraction_cblock_total_count_5
--- fraction_circle_total_count_11
--- fraction_circle_total_count_10
--- fraction_circle_total_count_4
--- fraction_circle_total_count_1
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
Cluster 34:
--- fraction_cblock_chains_
--- sum_
--- lcm_sum_
--- bar1_
--- __as3_type_fraction
--- numerator_1
--- left_130
--- bar
--- denominator_1
--- fraction_cblock_counts_
--- black
--- right_820
--- object
--- pieces_1_4
--- dragging
--- numerator_2
--- denominator_5
--- 1_1
--- pieces_1_5
--- fraction_cblock_containment_
--- denominator_4
--- fraction_cblock_total_count_2
--- denominator_2
--- pieces_1
--- model
--- denominator_3
--- denominator_7
--- fraction_cblock_total_count_3
--- pieces_1_3
--- numerator_3
Cluster 35:
--- plain_image_groups_
--- fraction_cblock_chains_
--- pieces_1_6
--- total_1
--- swf
--- sum_
--- lcm_sum_
--- denominator_6
--- __as3_type_fraction
--- url_assets_cms_wootmath_fractions_number_line_markers_start_marker
--- numberline_associations_
--- numerator_1
--- fraction_cblock_counts_
--- far
--- traveled
--- left_96
--- figure
--- use
--- url_assets_cms_wootmath_fractions_number_line_markers_end_marker
--- yard
--- numerator_5
--- denominator_3
--- pieces
--- 1_6_6
--- start
--- 1_3_
--- den_6
--- input_6
--- fraction_cblock_total_count_6
--- 1_6_4
Cluster 36:
--- den_3
--- fraction_input_value_2_3
--- num_2
--- whole_
--- enter
--- greatest
--- fraction_input_value_1_3
--- fraction
--- answer
--- num_1
--- fractions
--- smallest
--- greater
--- form
--- simplest
--- different
--- wearing
--- improper
--- 15
--- num_6
--- num_7
--- 10
--- num_3
--- complete
--- left
--- write
--- object
--- ate
--- plain_image_groups_
--- difference
Cluster 37:
--- png
--- plain_image_groups_
--- total_2
--- arrows
--- url_assets_cms_wootmath_fractions_ui_left_arrow
--- url_assets_cms_wootmath_fractions_ui_right_arrow
--- decimal
--- use
--- locations
--- points
--- correct
--- total_1
--- location
--- answer
--- fraction_cblock_total_count_8
--- fraction_cblock_total_count_9
--- fraction_circle_total_count_4
--- fraction_cblock_total_count_2
--- fraction_circle_total_count_3
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_cblock_total_count_3
--- fraction_cblock_total_count_4
--- fraction_cblock_total_count_5
--- fraction_cblock_total_count_6
--- fraction_circle_total_count_15
--- fraction_cblock_total_count_7
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
--- fraction_circle_total_count_12
Cluster 38:
--- hundredths
--- object
--- grid
--- model
--- decimal
--- answer
--- 19
--- tenth
--- 24
--- 13
--- 15
--- tenths
--- 11
--- 14
--- 80
--- 16
--- 17
--- 18
--- 12
--- 22
--- 10
--- 20
--- 60
--- 40
--- input_a_8
--- input_8
--- use
--- input_a_6
--- shown
--- input_6
Cluster 39:
--- 13
--- 15
--- grid
--- numerator
--- denominator
--- enter
--- den_15
--- half
--- answer
--- model
--- whole_
--- 100
--- input_0
--- hundredths
--- input_a_0
--- 11
--- fraction
--- sum
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- num_11
--- total
--- green
--- decimal
--- 20
--- bar
--- 1_5
--- bigger
--- covered
--- missing
Cluster 40:
--- decimal
--- use
--- make
--- true
--- pieces
--- answer
--- grid
--- shown
--- model
--- 41
--- 43
--- 42
--- 55
--- 73
--- 58
--- 54
--- 51
--- 52
--- 63
--- fraction_circle_total_count_15
--- fraction_circle_total_count_5
--- fraction_circle_total_count_4
--- fraction_circle_total_count_6
--- fraction_circle_total_count_3
--- fraction_circle_total_count_7
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_circle_total_count_10
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
Cluster 41:
--- pieces_1_15
--- fraction_circle_groups_
--- chains_
--- 1_5_
--- 1_3_
--- denominator_15
--- fraction
--- fraction_circle_counts_
--- lcm_sum_
--- sum_
--- scale_1
--- __as3_type_fraction
--- greens
--- fraction_circle_containment_
--- orange
--- x_300
--- circle1_
--- pieces_1_5
--- right_270
--- y_300
--- 1_15_5
--- 1_5_1
--- 1_15_3
--- numerator_1
--- pieces_1_3
--- left_30
--- left_342
--- brown
--- 1_15_
--- 1_3_1
Cluster 42:
--- fraction_input_value_1_2
--- den_2
--- num_1
--- whole_
--- 50
--- enter
--- 100
--- answer
--- different
--- fractions
--- form
--- simplest
--- plain_image_groups_
--- fraction
--- greater
--- smallest
--- 10
--- long
--- total_1
--- swf
--- object
--- express
--- greatest
--- circle
--- 12
--- num_2
--- multiplication
--- makes
--- shaded
--- make
Cluster 43:
--- input_
--- input_a_
--- correct
--- enter
--- comparison1
--- 100
--- 20
--- 50
--- 60
--- 10
--- 80
--- 40
--- 12
--- 24
--- 11
--- 22
--- 51
--- 52
--- 18
--- 31
--- 13
--- 32
--- 14
--- 42
--- 30
--- 19
--- 25
--- 41
--- complete
--- half
Cluster 44:
--- problem_text_1
--- complete
--- addition
--- sentence
--- problem_text_1_2
--- bitmap_text_inputs_
--- bitmap_text_interp_
--- problem_text_2
--- input_b_1
--- input_a_1
--- input_a_2
--- problem_text_0
--- input_b_2
--- answer
--- 1_6
--- 1_8
--- 1_10
--- 1_4
--- input_1
--- 2_8
--- problem_text_3
--- 2_6
--- 4_6
--- input_2
--- input_a_0
--- 2_4
--- 3_4
--- input_a_3
--- 6_8
--- 1_9
Cluster 45:
--- plain_image_groups_
--- fraction_cblock_chains_
--- total_1
--- swf
--- lcm_sum_
--- sum_
--- __as3_type_fraction
--- url_assets_cms_wootmath_fractions_number_line_markers_start_marker
--- traveled
--- url_assets_cms_wootmath_fractions_number_line_markers_end_marker
--- numerator_1
--- fraction_cblock_counts_
--- far
--- left_96
--- pieces_1_10
--- pieces_1_4
--- denominator_3
--- pieces_1_3
--- denominator_4
--- figure
--- denominator_2
--- use
--- distance
--- answer
--- pieces_1_8
--- shows
--- pieces_1_12
--- fraction_cblock_total_count_2
--- pieces_1_2
--- fraction_cblock_total_count_1
Cluster 46:
--- juice
--- pitcher
--- plain_image_groups_
--- orange
--- total_1
--- swf
--- whole_
--- fraction
--- num_1
--- answer
--- num_2
--- den_4
--- den_3
--- den_6
--- den_8
--- fraction_input_value_1_2
--- den_2
--- num_3
--- fraction_input_value_1_3
--- url_assets_cms_wootmath_fractions_number_line_juice_oj_tupperware_fourths_02
--- fraction_input_value_3_4
--- fraction_input_value_2_3
--- fraction_input_value_1_4
--- fraction_input_value_2_4
--- fraction_input_value_1_8
--- fraction_input_value_1_6
--- num_5
--- fraction_input_value_2_6
--- fraction_input_value_2_8
--- fraction_input_value_5_6
Cluster 47:
--- fraction_cblock_chains_
--- lcm_sum_
--- sum_
--- __as3_type_fraction
--- numerator_1
--- left_100
--- bar2_
--- bar1_
--- denominator_2
--- denominator_1
--- right_790
--- pieces_1_12
--- fraction_cblock_counts_
--- fraction_cblock_containment_
--- pieces_1_2
--- pieces_1_8
--- right_445
--- pieces_1
--- pieces_1_6
--- denominator_4
--- bar0_
--- denominator_3
--- pieces_1_4
--- undefined
--- 1_2
--- numerator_2
--- numerator_3
--- denominator_8
--- denominator_6
--- denominator_12
Cluster 48:
--- pieces_1_10
--- fraction_cblock_chains_
--- sum_
--- lcm_sum_
--- __as3_type_fraction
--- denominator_10
--- numerator_1
--- denominator_1
--- bar1_
--- fraction_cblock_counts_
--- denominator_5
--- denominator_2
--- bar2_
--- left_100
--- left_80
--- fraction_cblock_containment_
--- unit1_
--- unit2_
--- pieces_1
--- 10
--- numerator_3
--- numerator_5
--- numerator_2
--- pieces_1_6
--- fraction
--- numerator_4
--- pieces_1_5
--- numerator_7
--- left_130
--- numerator_9
Cluster 49:
--- area_target_contents_
--- plain_image_groups_
--- image_object_groups_
--- x_468
--- swf
--- y_118
--- total_1
--- drag
--- night
--- piranhas
--- answer
--- chocolate
--- pizza
--- fish
--- off_5
--- on_0
--- tenths
--- on_3
--- box
--- 10
--- number
--- off_6
--- correct
--- total_12
--- on_2
--- total_9
--- off_2
--- total_8
--- off_3
--- on_4
Cluster 50:
--- makes
--- input_a_1
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- statement
--- true
--- enter
--- number
--- fraction_input_value_
--- form
--- simplest
--- input_1
--- 10
--- fraction
--- input_a_4
--- input_4
--- 2_4
--- answer
--- input_3
--- input_a_3
--- input_a_5
--- input_5
--- input_2
--- 4_8
--- 12
--- input_a_2
--- input_a_6
--- input_6
--- 2_8
--- 3_6
Cluster 51:
--- 12
--- arrange
--- greatest
--- boxes
--- fractions
--- drag
--- fraction
--- 11
--- 10
--- answer
--- total
--- grid
--- model
--- fraction_cblock_chains_
--- fraction_cblock_total_count_1
--- lcm_sum_
--- sum_
--- numerator_1
--- middle
--- __as3_type_fraction
--- shade
--- denominator_3
--- greater
--- left
--- circle
--- fraction_cblock_counts_
--- 1_3_1
--- enter
--- 14
--- bar
Cluster 52:
--- make
--- true
--- boxes
--- fractions
--- comparison
--- drag
--- answer
--- 12
--- tenths
--- enter
--- numbers
--- 10
--- statement
--- input_
--- input_a_5
--- input_5
--- input_a_4
--- input_a_6
--- input_4
--- input_6
--- input_a_7
--- input_a_8
--- input_3
--- input_a_3
--- input_7
--- input_8
--- input_2
--- input_a_2
--- 16
--- input_a_9
Cluster 53:
--- denominator
--- greater
--- fractions
--- input_a_3
--- input_a_2
--- input_a_4
--- input_a_5
--- input_a_6
--- bigger
--- smaller
--- input_a_1
--- input_a_7
--- answer
--- enter
--- input_4
--- input_a_9
--- input_9
--- whole_
--- input_5
--- input_1
--- input_2
--- input_a_
--- input_a_8
--- input_a_10
--- input_3
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- input_b_4
--- input_b_6
--- input_b_2
Cluster 54:
--- object
--- decimal
--- input_0
--- input_a_0
--- half
--- enter
--- hundredths
--- answer
--- model
--- 14
--- 17
--- 16
--- 18
--- bigger
--- 19
--- 12
--- 22
--- 09
--- 25
--- bar
--- black
--- 11
--- 50
--- 15
--- 33
--- 24
--- plain_image_groups_
--- make
--- 41
--- dragging
Cluster 55:
--- using
--- model
--- answer
--- size
--- fraction
--- pieces
--- equivalent
--- greater
--- cover
--- dark
--- equal
--- grays
--- sixths
--- blue
--- thirds
--- fifths
--- yellows
--- yellow
--- halves
--- fourths
--- browns
--- piece
--- blues
--- smaller
--- cake
--- reds
--- pinks
--- fraction_circle_total_count_13
--- fraction_circle_total_count_12
--- fraction_input_value_2_5
Cluster 56:
--- plain_image_groups_
--- radio_group_mc1_
--- radio_group_mc2_
--- text_yes
--- choice_a
--- total_1
--- swf
--- shapes
--- object
--- shaded
--- text_no
--- choice_b
--- url_assets_cms_wootmath_fractions_equal_parts_fourths_fourth_03
--- answer
--- fraction_circle_total_count_1
--- fraction_circle_total_count_10
--- fraction_circle_total_count_14
--- fraction_circle_total_count_11
--- fraction_circle_total_count_12
--- fraction_circle_total_count_13
--- fraction_circle_total_count_15
--- youranswer
--- fraction_circle_counts_
--- fraction_circle_groups_
--- fraction_circle_total_count_2
--- fraction_circle_containment_
--- fraction_cblock_total_count_9
--- fraction_cblock_total_count_8
--- fraction_cblock_total_count_7
--- fraction_cblock_total_count_6
Cluster 57:
--- fraction_circle_groups_
--- scale_0
--- fraction_circle_counts_
--- piece
--- scale_1
--- x_675
--- x_200
--- unit_
--- x_811
--- box
--- pieces_1_10
--- pieces_1_9
--- unit
--- fraction_circle_total_count_4
--- pieces_1_6
--- pieces
--- chains_
--- x_550
--- y_450
--- y_415
--- y_300
--- pieces_1_4
--- answer
--- pieces_1_5
--- drag
--- 1_10_1
--- half
--- 1_6_1
--- lcm_sum_
--- sum_
Cluster 58:
--- arrange
--- order
--- greatest
--- boxes
--- fractions
--- drag
--- fraction
--- 54
--- 51
--- 42
--- 83
--- 44
--- 123
--- 32
--- 52
--- 31
--- answer
--- 12
--- decimals
--- 41
--- 15
--- 63
--- 20
--- 18
--- 34
--- 16
--- 22
--- input_
--- 14
--- 75
Cluster 59:
--- plain_image_groups_
--- hot
--- mug
--- chocolate
--- whole_
--- num_1
--- fraction_input_value_1_4
--- den_4
--- total_1
--- swf
--- fraction
--- url_assets_cms_wootmath_fractions_number_line_mug_mug_half_01
--- answer
--- den_3
--- fraction_input_value_1_3
--- fraction_input_value_1_2
--- num_2
--- den_2
--- fraction_input_value_2_4
--- fraction_input_value_2_3
--- wearing
--- fraction_input_value_3_4
--- num_3
--- circle
--- 25
--- long
--- 100
--- den_1
--- enter
--- fraction_input_value_1_5
Cluster 60:
--- pieces_1_6
--- fraction_circle_groups_
--- chains_
--- lcm_sum_
--- sum_
--- fraction_circle_counts_
--- denominator_6
--- unit1_
--- __as3_type_fraction
--- unit2_
--- circle1_
--- fraction_circle_containment_
--- scale_0
--- 1_2_
--- fraction
--- 1_
--- y_300
--- pieces_1
--- numerator_1
--- scale_1
--- pinks
--- x_300
--- pieces_1_8
--- 1_6_3
--- denominator_3
--- numerator_2
--- numerator_6
--- answer
--- circle
--- object
Cluster 61:
--- den_10
--- whole_
--- 10
--- smallest
--- num_1
--- fraction
--- enter
--- answer
--- fraction_input_value_1_5
--- fraction_input_value_1_10
--- fraction_input_value_1_8
--- den_5
--- popcorn
--- num_2
--- den_8
--- smaller
--- wearing
--- robots
--- num_7
--- fraction_input_value_1_7
--- greater
--- fraction_input_value_1_6
--- den_7
--- num_3
--- num_6
--- boxes
--- den_6
--- num_9
--- greatest
--- num_5
Cluster 62:
--- form
--- simplest
--- enter
--- difference
--- mult_n_1_
--- mult_d_1_
--- num_1
--- object
--- sum
--- answer
--- whole_
--- mult_d_2_
--- mult_n_2_
--- den_4
--- num_3
--- den_6
--- fraction_input_value_1
--- whole_1
--- den_3
--- whole_3
--- fraction_input_value_3
--- fraction_input_value_2
--- whole_2
--- bitmap_text_inputs_
--- bitmap_text_interp_
--- den_2
--- num_2
--- 15
--- den_5
--- 1_4
Cluster 63:
--- shaded
--- parts
--- whole_
--- equal
--- den_8
--- fraction
--- rectangle
--- den_6
--- answer
--- flower
--- polygon
--- star
--- num_1
--- num_3
--- num_2
--- num_4
--- den_7
--- num_7
--- fraction_input_value_1_8
--- num_5
--- fraction_input_value_3_6
--- fraction_input_value_1_6
--- fraction_input_value_2_8
--- fraction_input_value_4_8
--- num_6
--- fraction_input_value_2_6
--- piranhas
--- fraction_input_value_6_8
--- fraction_input_value_3_8
--- fraction_input_value_4_6
Cluster 64:
--- 30
--- input_
--- input_a_
--- enter
--- correct
--- 16
--- 10
--- 20
--- 17
--- 14
--- 100
--- 50
--- 13
--- comparison1
--- half
--- 60
--- 12
--- input_0
--- hundredths
--- input_a_0
--- homework
--- answer
--- total
--- 15
--- 80
--- math
--- 34
--- 11
--- makes
--- 32
Cluster 65:
--- mile
--- run
--- block
--- ran
--- does
--- wants
--- miles
--- answer
--- long
--- far
--- drag
--- did
--- walked
--- rex
--- whole_
--- elephant
--- object
--- num_1
--- input_a_2
--- input_a_3
--- distance
--- total
--- line
--- express
--- fraction_input_value_1
--- whole_1
--- hippo
--- number
--- giraffe
--- den_2
Cluster 66:
--- den_5
--- fraction_input_value_4_5
--- num_4
--- whole_
--- fraction_input_value_2_5
--- fraction
--- enter
--- greatest
--- num_2
--- answer
--- wearing
--- num_3
--- smallest
--- piranhas
--- shaded
--- greater
--- 15
--- form
--- fractions
--- simplest
--- different
--- rectangle
--- smaller
--- 10
--- ate
--- left
--- robots
--- improper
--- num_7
--- pizza
Cluster 67:
--- words
--- model_lbl_0
--- express
--- decimal
--- tenths
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- model
--- answer
--- input_b_5
--- input_b_4
--- input_b_6
--- input_b_7
--- input_a_5
--- input_b_1
--- input_b_3
--- tenth
--- input_a_4
--- input_a_9
--- input_a_6
--- input_a_7
--- input_b_8
--- input_a_1
--- input_a_8
--- input_a_3
--- input_b_2
--- input_a_2
--- 10
--- input_a_
--- input_a_10
Cluster 68:
--- radio_group_problem_
--- radio_choice_b
--- radio_choice_a
--- choice_b
--- numerator
--- choice_a
--- half
--- denominator
--- fraction
--- estimate
--- ate
--- cake
--- greater
--- pie
--- fraction_circle_groups_
--- fourth
--- bar
--- text_one
--- radio_text_one
--- piece
--- cut
--- y_400
--- eat
--- seventh
--- fraction_circle_total_count_1
--- 24
--- did
--- unit
--- x_512
--- fifth
Cluster 69:
--- grid
--- model
--- answer
--- represented
--- input0_0
--- 100
--- 14
--- enter
--- numbers
--- 09
--- 20
--- boxes
--- drag
--- decimal
--- 18
--- input_0
--- input_a_0
--- pieces
--- covering
--- 12
--- 55
--- 40
--- 10
--- 33
--- 41
--- 75
--- 32
--- 73
--- fraction_circle_total_count_5
--- fraction_circle_total_count_8
Cluster 70:
--- pieces_1_8
--- fraction_circle_groups_
--- chains_
--- sum_
--- lcm_sum_
--- fraction_circle_counts_
--- denominator_8
--- __as3_type_fraction
--- 1_2_
--- fraction_circle_containment_
--- fraction
--- piece_0_
--- unit1_
--- unit_
--- scale_1
--- scale_0
--- unit2_
--- pieces_1_4
--- y_300
--- numerator_1
--- pieces_1
--- circle1_
--- left_0
--- piece_1_
--- 1_8_4
--- x_512
--- denominator_4
--- piece_2_
--- 1_
--- 1_1
Cluster 71:
--- amounts
--- model
--- answer
--- tenths
--- box
--- drag
--- fraction_circle_total_count_4
--- fraction_circle_total_count_3
--- fraction_circle_total_count_2
--- fraction_circle_total_count_5
--- fraction_circle_total_count_6
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
--- fraction_circle_total_count_12
--- fraction_circle_total_count_11
--- fraction_circle_total_count_16
--- youranswer
--- fraction_circle_total_count_1
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
--- fraction_cblock_total_count_9
--- fraction_cblock_total_count_8
--- fraction_cblock_total_count_7
--- fraction_cblock_total_count_6
--- fraction_cblock_total_count_5
--- fraction_cblock_total_count_4
--- fraction_cblock_total_count_3
--- fraction_cblock_total_count_2
Cluster 72:
--- fraction_cblock_chains_
--- lcm_sum_
--- sum_
--- left_125
--- __as3_type_fraction
--- numerator_1
--- 1_
--- right_815
--- denominator_1
--- fraction
--- denominator_2
--- fraction_cblock_counts_
--- right_470
--- denominator_4
--- pieces_1
--- denominator_12
--- pieces_1_2
--- bitmap_text_interp_
--- bitmap_text_inputs_
--- 1_2
--- right_297
--- denominator_3
--- pieces_1_6
--- pieces_1_10
--- shown
--- pieces_1_12
--- enter
--- fraction_cblock_containment_
--- left_297
--- equal
Cluster 73:
--- 1_3_
--- pieces_1_6
--- fraction_circle_groups_
--- chains_
--- fraction
--- fraction_circle_counts_
--- pieces_1_3
--- sum_
--- lcm_sum_
--- scale_1
--- __as3_type_fraction
--- brown
--- denominator_6
--- fraction_circle_containment_
--- 1_3_1
--- 1_6_2
--- x_512
--- numerator_1
--- y_300
--- denominator_3
--- numerator_2
--- pizza
--- left_0
--- right_240
--- cover
--- fractionthat
--- right_120
--- 1_1
--- input_2
--- left_30
Cluster 74:
--- complete
--- sentence
--- bitmap_text_inputs_
--- bitmap_text_interp_
--- addition
--- problem_text_3
--- problem_text_1_2
--- problem_text_0
--- input_a_3
--- input_a_5
--- answer
--- math
--- input_b_3
--- input_a_4
--- problem_text_2
--- input_5
--- multiplication
--- 3_6
--- input_b_2
--- input_4
--- input_a_6
--- input_b_4
--- 4_6
--- 10
--- plain_image_groups_
--- input_3
--- 11
--- input_6
--- input_1
--- input_a_7
Cluster 75:
--- pieces_1_7
--- denominator_7
--- fraction_cblock_chains_
--- lcm_sum_
--- sum_
--- fraction_circle_groups_
--- __as3_type_fraction
--- bar1_
--- chains_
--- left_130
--- numerator_1
--- denominator_1
--- black
--- bar
--- object
--- seventh
--- fraction_cblock_counts_
--- numerator_5
--- fraction_circle_counts_
--- numerator_6
--- pieces_1
--- unit1_
--- 1_1
--- numerator_3
--- 1_
--- right_820
--- numerator_4
--- dragging
--- fraction
--- light
Cluster 76:
--- pieces_1_5
--- fraction_circle_groups_
--- sum_
--- lcm_sum_
--- denominator_5
--- fraction_cblock_chains_
--- chains_
--- __as3_type_fraction
--- circle1_
--- fraction_circle_counts_
--- bar1_
--- numerator_1
--- 1_
--- denominator_1
--- numerator_4
--- scale_0
--- numerator_5
--- unit1_
--- fraction_circle_containment_
--- pieces_1
--- fraction
--- unit2_
--- 1_5_5
--- object
--- black
--- 1_1
--- fifth
--- scale_1
--- numerator_3
--- circle
Cluster 77:
--- fraction_circle_groups_
--- circle1_1_
--- circle1_2_
--- lcm_sum_
--- sum_
--- fraction_circle_counts_
--- y_350
--- scale_1
--- __as3_type_fraction
--- pieces_1_12
--- numerator_1
--- object
--- pieces_1_6
--- chains_
--- fraction_circle_containment_
--- x_750
--- x_250
--- say
--- pieces_1_8
--- cover
--- pieces_1_15
--- piece
--- pieces_1_4
--- dark
--- pieces_1_10
--- yellow
--- left_270
--- pieces_1_9
--- blue
--- denominator_6
Cluster 78:
--- pieces_1_12
--- fraction_circle_groups_
--- sum_
--- lcm_sum_
--- fraction_cblock_chains_
--- chains_
--- __as3_type_fraction
--- denominator_12
--- 1_2_
--- fraction
--- fraction_circle_counts_
--- numerator_1
--- unit_
--- 12
--- fraction_circle_containment_
--- scale_1
--- fraction_cblock_counts_
--- 1_12_6
--- denominator_2
--- denominator_1
--- numerator_6
--- reds
--- pieces_1
--- input_a_6
--- pieces_1_2
--- input_6
--- numerator_11
--- yellow
--- 1_1
--- x_300
Cluster 79:
--- yards
--- long
--- fraction_input_value_
--- bar
--- whole_
--- fraction
--- num_1
--- den_6
--- num_2
--- den_2
--- object
--- den_4
--- den_8
--- whole_3
--- fraction_input_value_3
--- num_3
--- den_3
--- num_5
--- num_4
--- 5_6
--- den_5
--- den_7
--- num_7
--- num_6
--- den_1
--- den_9
--- answer
--- 4_6
--- den_
--- 3_6
Cluster 80:
--- den_9
--- whole_
--- fraction
--- smallest
--- enter
--- fraction_input_value_1_9
--- answer
--- num_2
--- num_4
--- greater
--- num_1
--- num_5
--- shaded
--- num_8
--- num_6
--- num_7
--- num_3
--- wearing
--- different
--- smaller
--- fractions
--- piranhas
--- cats
--- greatest
--- 10
--- 12
--- 15
--- ate
--- fraction_input_value_2_4
--- den_8
Cluster 81:
--- pieces_1_12
--- 1_3_
--- fraction_circle_groups_
--- chains_
--- fraction
--- fraction_circle_counts_
--- lcm_sum_
--- sum_
--- left_30
--- 1_12_4
--- brown
--- input_a_4
--- reds
--- input_4
--- denominator_12
--- pieces_1_3
--- __as3_type_fraction
--- 1_3_1
--- right_270
--- scale_1
--- fraction_circle_total_count_5
--- numerator_4
--- y_300
--- x_300
--- fraction_circle_containment_
--- denominator_3
--- numerator_1
--- equals
--- equal
--- answer
Cluster 82:
--- whole_
--- den_7
--- num_5
--- fraction
--- den_6
--- greatest
--- enter
--- answer
--- fraction_input_value_5_6
--- den_8
--- wearing
--- num_6
--- num_4
--- num_3
--- num_2
--- fraction_input_value_5_8
--- fraction_input_value_4_6
--- different
--- greater
--- num_7
--- fraction_input_value_2_6
--- fraction_input_value_3_6
--- shaded
--- smaller
--- fractions
--- fraction_input_value_4_8
--- fraction_input_value_1_7
--- piranhas
--- num_1
--- cats
Cluster 83:
--- tothe
--- ans1
--- box
--- fractions
--- denominator
--- ans0
--- numerator
--- drag
--- answer
--- ans2
--- fraction_cblock_total_count_4
--- fraction_circle_total_count_13
--- fraction_cblock_total_count_2
--- fraction_circle_total_count_5
--- fraction_circle_total_count_4
--- fraction_circle_total_count_3
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_circle_total_count_12
--- fraction_cblock_total_count_5
--- fraction_circle_total_count_11
--- fraction_circle_total_count_6
--- fraction_circle_total_count_1
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
--- fraction_cblock_total_count_9
--- fraction_cblock_total_count_8
Cluster 84:
--- pieces_1_8
--- 1_4_
--- fraction_circle_groups_
--- chains_
--- pieces_1_4
--- fraction
--- fraction_circle_counts_
--- sum_
--- lcm_sum_
--- left_0
--- scale_1
--- __as3_type_fraction
--- denominator_8
--- x_512
--- blue
--- fraction_circle_containment_
--- dark
--- denominator_4
--- y_300
--- numerator_2
--- numerator_1
--- 1_4_1
--- pizza
--- 1_8_2
--- fractionthat
--- cover
--- right_180
--- right_270
--- 1_1
--- right_90
Cluster 85:
--- fraction_cblock_chains_
--- sum_
--- lcm_sum_
--- __as3_type_fraction
--- numerator_1
--- left_175
--- unit1_
--- unit2_
--- denominator_1
--- right_865
--- unit3_
--- fraction_cblock_counts_
--- denominator_4
--- fraction_cblock_containment_
--- denominator_12
--- denominator_6
--- denominator_8
--- pieces_1
--- denominator_2
--- pieces_1_6
--- pieces_1_4
--- pieces_1_12
--- denominator_3
--- right_347
--- pieces_1_8
--- fraction_cblock_total_count_6
--- numerator_2
--- right_290
--- pieces_1_3
--- input_
Cluster 86:
--- 1_4
--- match
--- fraction_input_value_
--- shade
--- fraction
--- input_a_
--- choose
--- comparison
--- correct
--- bar
--- star
--- circle
--- rectangle
--- fraction_cblock_total_count_6
--- fraction_cblock_total_count_2
--- fraction_circle_total_count_2
--- fraction_circle_total_count_16
--- fraction_circle_total_count_15
--- fraction_cblock_total_count_18
--- fraction_circle_total_count_14
--- fraction_circle_total_count_13
--- fraction_circle_total_count_12
--- fraction_cblock_total_count_3
--- fraction_cblock_total_count_7
--- fraction_circle_total_count_11
--- fraction_circle_total_count_10
--- fraction_circle_total_count_1
--- fraction_cblock_total_count_4
--- fraction_circle_groups_
--- fraction_circle_counts_
Cluster 87:
--- pieces_1_15
--- fraction_cblock_chains_
--- lcm_sum_
--- sum_
--- denominator_15
--- __as3_type_fraction
--- numerator_1
--- fraction_cblock_counts_
--- denominator_1
--- left_90
--- denominator_3
--- 15
--- numerator_2
--- plain_image_groups_
--- fraction
--- bar1_
--- pieces_1_3
--- denominator_5
--- unit1_
--- 1_3_
--- fraction_cblock_containment_
--- right_780
--- pieces_1
--- pieces_1_12
--- unit_
--- pieces_1_5
--- numerator_4
--- total_1
--- numerator_5
--- numerator_14
Cluster 88:
--- pieces_1_6
--- piece_0_
--- fraction_circle_groups_
--- piece_1_
--- chains_
--- pieces_1_3
--- lcm_sum_
--- sum_
--- fraction_circle_counts_
--- right_120
--- left_0
--- __as3_type_fraction
--- denominator_6
--- fraction_circle_containment_
--- x_512
--- denominator_3
--- numerator_2
--- scale_1
--- cover
--- y_300
--- numerator_1
--- sixth
--- pieces
--- 1_3_2
--- 1_1
--- shown
--- 1_6_4
--- fractionthat
--- fraction_circle_total_count_7
--- pieces_1
Cluster 89:
--- pieces_1_9
--- circle1_
--- fraction_circle_groups_
--- chains_
--- denominator_9
--- fraction_circle_counts_
--- lcm_sum_
--- sum_
--- ninth
--- scale_1
--- numerator_8
--- object
--- y_325
--- x_300
--- __as3_type_fraction
--- 1_9_8
--- brown
--- dragging
--- fraction_circle_containment_
--- numerator_7
--- circle
--- pieces
--- fraction_circle_total_count_9
--- black
--- right_0
--- white
--- 1_1
--- left_320
--- whites
--- model
Cluster 90:
--- fraction_circle_groups_
--- brown
--- fraction_circle_total_count_1
--- say
--- cover
--- y_350
--- piece
--- 1_3_1
--- x_300
--- scale_1
--- pieces_1_3
--- fraction_circle_counts_
--- reds
--- pink
--- circle1_
--- pinks
--- whites
--- red
--- white
--- answer
--- pieces_1_6
--- lcm_sum_
--- sum_
--- 1_6_1
--- x_250
--- fraction_circle_total_count_2
--- numerator_1
--- __as3_type_fraction
--- denominator_6
--- equal
Cluster 91:
--- tenths
--- object
--- grid
--- model
--- answer
--- 10
--- youranswer
--- fraction_circle_total_count_14
--- fraction_circle_total_count_11
--- fraction_circle_total_count_12
--- fraction_circle_total_count_13
--- fraction_circle_total_count_16
--- fraction_circle_total_count_15
--- fraction_circle_total_count_2
--- fraction_circle_total_count_3
--- fraction_circle_total_count_4
--- fraction_circle_total_count_5
--- fraction_circle_total_count_10
--- fraction_circle_total_count_1
--- fraction_circle_total_count_7
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
--- fraction_cblock_total_count_9
--- fraction_cblock_total_count_8
--- fraction_cblock_total_count_7
--- fraction_cblock_total_count_6
--- fraction_cblock_total_count_5
--- fraction_cblock_total_count_4
--- fraction_cblock_total_count_3
Cluster 92:
--- plain_image_groups_
--- url_assets_cms_wootmath_fractions_misc_objects_ladybug_alt
--- url_assets_cms_wootmath_fractions_misc_objects_ant_alt
--- swf
--- ladybugs
--- bugs
--- total_2
--- total_3
--- form
--- simplest
--- whole_
--- enter
--- total_4
--- num_1
--- fraction_input_value_1_2
--- den_2
--- fraction
--- answer
--- den_3
--- total_6
--- fraction_input_value_1_3
--- num_2
--- den_4
--- num_3
--- total_1
--- fraction_input_value_2_3
--- fraction_input_value_1_4
--- fraction_input_value_3_4
--- equivalent
--- total_9
Cluster 93:
--- plain_image_groups_
--- fraction_cblock_chains_
--- pieces_1_8
--- total_1
--- swf
--- lcm_sum_
--- sum_
--- figure
--- far
--- traveled
--- url_assets_cms_wootmath_fractions_number_line_objects_v2_bug_trail
--- use
--- __as3_type_fraction
--- url_assets_cms_wootmath_fractions_number_line_markers_start_marker
--- url_assets_cms_wootmath_fractions_number_line_objects_v2_ladybug
--- ladybug
--- denominator_8
--- left_96
--- pieces
--- fraction_cblock_counts_
--- numerator_1
--- fraction_cblock_total_count_1
--- input_1
--- denominator_1
--- left_99
--- answer
--- numerator_3
--- ant
--- url_assets_cms_wootmath_fractions_number_line_objects_v2_ant
--- numerator_5
Cluster 94:
--- 11
--- think
--- came
--- grid
--- greater
--- cut
--- bigger
--- piece
--- radio_group_problem_
--- birthday
--- 10
--- model
--- half
--- cake
--- 1_8
--- answer
--- pan
--- brownies
--- pie
--- piranhas
--- pieces
--- smaller
--- hundredths
--- 100
--- radio_choice_b
--- choice_b
--- 1_9
--- 1_10
--- equal
--- input_0
Cluster 95:
--- 10
--- arrange
--- greatest
--- boxes
--- fractions
--- drag
--- fraction
--- answer
--- statement
--- greater
--- choose
--- correct
--- meter
--- beetle
--- start
--- shade
--- fraction_circle_total_count_15
--- fraction_circle_total_count_14
--- fraction_circle_total_count_12
--- fraction_circle_total_count_16
--- fraction_circle_total_count_2
--- fraction_circle_total_count_3
--- fraction_circle_total_count_13
--- youranswer
--- fraction_circle_total_count_11
--- fraction_circle_total_count_10
--- fraction_circle_total_count_1
--- fraction_circle_groups_
--- fraction_circle_counts_
--- fraction_circle_containment_
Cluster 96:
--- whole_
--- den_12
--- enter
--- den_4
--- 12
--- fraction
--- smallest
--- num_3
--- num_2
--- fraction_input_value_3_4
--- greatest
--- answer
--- fraction_input_value_2_4
--- smaller
--- fraction_input_value_1_12
--- num_1
--- num_10
--- greater
--- fractions
--- num_4
--- fraction_input_value_1_4
--- 10
--- num_6
--- different
--- num_7
--- num_5
--- den_8
--- fraction_input_value_3_8
--- shaded
--- equal
Cluster 97:
--- fraction_circle_groups_
--- fraction_circle_total_count_1
--- x_300
--- scale_1
--- fraction_circle_counts_
--- circle
--- yellow
--- circle1_
--- black
--- say
--- dark
--- cover
--- y_350
--- y_300
--- pieces_1
--- 1_1
--- fraction_circle_total_count_2
--- pieces_1_2
--- answer
--- equals
--- blue
--- 1_2_1
--- blues
--- piece
--- grays
--- yellows
--- equal
--- input_a_1
--- browns
--- lcm_sum_
Cluster 98:
--- den_15
--- 15
--- whole_
--- smallest
--- enter
--- fraction
--- smaller
--- answer
--- num_2
--- 11
--- greatest
--- num_11
--- num_1
--- greater
--- 12
--- num_7
--- num_4
--- num_5
--- num_10
--- denominator
--- numerator
--- num_3
--- num_8
--- 10
--- num_6
--- num_9
--- 14
--- numbers
--- shaded
--- circle
Cluster 99:
--- pieces_1_4
--- fraction_circle_groups_
--- chains_
--- lcm_sum_
--- sum_
--- fraction_circle_counts_
--- denominator_4
--- __as3_type_fraction
--- 1_
--- unit_
--- fraction_circle_containment_
--- scale_1
--- unit1_
--- fraction
--- circle1_
--- pieces_1
--- piece_0_
--- unit2_
--- scale_0
--- y_300
--- numerator_1
--- 1_4_4
--- circle
--- 1_1
--- fraction_circle_total_count_5
--- x_300
--- black
--- numerator_4
--- dark
--- right_180
df3['cluster_100'] = km.labels______no_output_____df3['trait_1'] = df3['behavioral_traits'].apply(lambda x : x[0] if len(x) > 0 else 'None' )
df3['trait_2'] = df3['behavioral_traits'].apply(lambda x : x[1] if len(x) > 1 else 'None' ) _____no_output_____df_trait_1 = df3.groupby(['cluster_100', 'trait_1']).size().unstack(fill_value=0)
df_trait_2 = df3.groupby(['cluster_100', 'trait_2']).size().unstack(fill_value=0)_____no_output_____df_cluster_100 = df3.groupby('cluster_100')_____no_output_____df_trait_1.index.rename('cluster_100', inplace=True)
df_trait_2.index.rename('cluster_100', inplace=True)
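# (added note) Each of these tables counts, for every cluster_100 label, how many rows of df3
# carry a given value of the first / second behavioral trait.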
df_traits = pd.concat([df_trait_1, df_trait_2], axis=1)_____no_output_____df_traits = df_traits.drop('None', axis=1)_____no_output_____#df_traits_norm = (df_traits - df_traits.mean()) / (df_traits.max() - df_traits.min())
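# (added note) The active normalization below scales each trait column to sum to 1 across clusters,
# so the heatmap compares each trait's distribution over clusters rather than raw counts.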
df_traits_norm = (df_traits / (df_traits.sum()) )_____no_output_____fig = plt.figure(figsize=(18.5, 16))
cmap = sns.cubehelix_palette(light=.95, as_cmap=True)
sns.heatmap(df_traits_norm, cmap=cmap, linewidths=.5)
#sns.heatmap(df_traits_norm, cmap="YlGnBu", linewidths=.5)_____no_output_____
</code>
_____no_output_____
| {
"repository": "bdmckean/woot_math_analysis",
"path": "working/EDA_WM-BrianMc-topics-Method2-heat_map-100-clusters-random62.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 267919,
"hexsha": "d089926154e4bdd8a0db4421b9a401570de5d537",
"max_line_length": 145398,
"avg_line_length": 69.6798439532,
"alphanum_fraction": 0.7500961111
} |
# Notebook from nicholasneo78/NLP-assignment
Path: Assignment 1/SourceCode/Web Scraping Yelp.ipynb
<code>
import bs4 as bs
import re
import urllib.request as url_____no_output_____reviewDict = []_____no_output_____#Save in a file
filename = "yelp_Clinton_Street_Baking Company_&_Restaurant_scrapped.csv"
f = open(filename, "w")_____no_output_____#reviewNo = [0,20,40,60]
reviewNo = str(60)_____no_output_____source = url.urlopen('https://www.yelp.com/biz/clinton-street-baking-company-and-restaurant-singapore?osq=Waffles&start='+reviewNo+'&sort_by=rating_desc')_____no_output_____page_soup = bs.BeautifulSoup(source, 'html.parser')_____no_output_____mains = page_soup.find_all("div", {"class": "lemon--div__373c0__1mboc arrange-unit__373c0__o3tjT arrange-unit-grid-column--8__373c0__2dUx_ border-color--default__373c0__3-ifU"})_____no_output_____#Set loop over 1 attribute
count = 0
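# (added note) Each 'main' div below is one review block: the loop pulls the star rating from its
# aria-label, strips markup with the regexes, swaps commas for periods, and stores
# [rating, cleaned text] in reviewDict.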
for main in mains:
print()
print("====Review",count,"====")
reviews = ""
try:
try:
ratings = main.find("span", {"class": "lemon--span__373c0__3997G display--inline__373c0__3JqBP border-color--default__373c0__3-ifU"}).div.get('aria-label')
print("Rating:",ratings)
except:
print("--Cannot find ratings--")
print()
#Code to clean for angular brackets < >
cleaned_item = re.sub('\<.*?(\S*)?\>','',str(main))
        #Code to clean for curly brackets { }
cleaned_item = re.sub('\{.*?(\S*)?\}','',str(cleaned_item))
#Code to replace , with fullstop
cleaned_item = re.sub(',','.',str(cleaned_item))
print(cleaned_item)
reviews = cleaned_item
reviewDict.append([ratings[0],reviews])
except:
print("Error")
count+=1
====Review 0 ====
--Cannot find ratings--
1 star ratingEek! Methinks not.2 star ratingMeh. I've experienced better.3 star rating4 star ratingYay! I'm a fan.5 star ratingWoohoo! As good as it gets!Start your review of Clinton Street Baking Company & Restaurant
====Review 1 ====
Rating: 3 star rating
.css-1qkiuob.css-1qkiuob svg 1 photo.css-z6383k.css-z6383k svg 1 check-inMy first time here. Ever. Didn't know about the New York branch so there is no basis for comparison. I've heard lots of excited rumblings of course, and friends who used to live in New York were thrilled about its opening. Fair enough. From all accounts, I hear that this place largely works. The food is comparable and all. I'll have to take their word for it. I only came here for some cake and I got the upside down pineapple cake. I don't know why but seeing a badly sliced and sloppily presented cake annoyed me. It shows a lack of care in the attentive-level of staff.When I went to pay, I noticed that other cakes (of other flavors) were similarly badly sliced. Woah. I really did think Clinton St Bakery was better than this. Taste-wise, the cake was fine. Not great, not terrible. The staff was sufficiently polite. But I think the devil's in the details and they need to teach their staff how to slice cakes properly, lift the slices and place them on plates..css-1c2abjj.css-1c2abjj svgUseful Funny Cool
====Review 2 ====
Rating: 3 star rating
2 photosDefinitely order the chicken and waffles. Staff recommended to order onion rings as sides. Do not order onion rings. Completely disappointing. We came on a Monday afternoon, not too busy just some tables of ladies of leisure... but be aware your last order is at 5pm.
====Review 3 ====
Rating: 3 star rating
The Good, The Bad, and The Bourbon Salted Caramel MilkshakeI preface this by saying I absolutely love Clinton Street Baking Co. in New York. The Singapore shop matches the New York venue in some respects (main courses and drinks) and fails in other areas (pies and service).I'll start with the good. For a shop known for its baked goods and pancakes, the drinks were fantastic. The bourbon salted caramel milkshake was the best thing we had and the bloody mary, which is so rarely made well, was also delicious. The main courses we tried (pancakes and huevos rancheros) were every bit as good as the NY shop.Unfortunately, there were several areas where Clinton Street really struggled. First, the service was horrible. After we were seated, not one staff member came to our table without being asked. We had to ask for drinks, for menus, to order, to get a dessert menu, to get a check, and to have our check taken and processed. When there were quality issues with food (see below), we did not get an apology and the waiter seemed annoyed to have to deal with the issue.Second, when we arrived for brunch, two of the three pies of the day were unavailable. No explanation was give nor was any apology forthcoming. We tried the only special available (strawberry rhubarb pie) and the lime meringue pie. Both were disappointing. The crumble on the strawberry rhubarb pie was like a group of small, indestructible rocks. We could not break up the 10cm pieces with a fork -- and we tried! We were not provided a knife with dessert. The lime meringue pie tasted good but there was so little lime custard except at the very edge of the pie that it tasted almost exclusively of meringue.When I pointed these out to our server he simply asked if we would like something different. I pointed out that there was very little else available as two of the pies of the day were not available and he stood in silence. He did not seem to have any idea how the pies should be made or the quality that is normally associated with Clinton Street.Useful
====Review 4 ====
Rating: 2 star rating
I came here because the hotel breakfast at the Intercontinental was $S30 a pop, and I thought I could do better -- or at least the same for less. So I decided to go to Killiney Kopitiam but my Ang Moh instincts took over while passing the lower-rated Clinton Street Baking Co. They had muffins on display and I am partial to Western breakfasts. So I was sucked in. The first thing I saw from the menu was it was not going to be much less expensive. So maybe it'll be great? Same for more?I ordered what I thought was their signature omelette because it had their name on it. But it only comes with two add-in ingredients (besides the eggs, of course). And apparently nothing but those two plus the eggs because when the omelette arrived I was asked if I wanted salt, pepper, or catsup. The first sign of cooking mediocrity is the need to season it with any of those things after cooking, especially a $20 omelette. If I'd known I would have chosen bacon to go in there. Ugh. It was so bland. And dry. I regretted not asking for catsup. I regretted more not having salsa, which is the best cure for bland eggs.The hash browns, though, were remarkable. If it was a two-egg omelette instead of a three-egger I was pushing through I would have savored them more. My cappuccino was also well done. Final tally, $S33!At the hotel I could have bottomless juice, cappuccinos, an array of Japanese, Chinese, Indian, and Western breakfast items. Smoked salmon with capers. Fresh-made waffles. Croissants and danishes. Fresh fruit. I got less for more. I'm kind of amazed they didn't have oatmeal (or porridge) on the menu. My punishment for not going to the kopitiam.
====Review 5 ====
Rating: 2 star rating
That moment when you're in an entirely different country and you see they have your favorite NYC breakfast spot is in Singapore!!!! BUT it's no where near as good and your charged double the price. Very disappointing. :(
====Review 6 ====
Rating: 2 star rating
Breakfast was mediocre. Nothing crazy or wow about. Overpriced as well. Would I recommend? Sure, if there's nothing else on the block to eat. Otherwise, it's not worth a visit.
====Review 7 ====
Rating: 1 star rating
Everything should be half the price. For the same price you can get three times the quality at P.S. Cafe. Dishes with eggs were underseasoned and in lack of flavor. The 20 dollar omelet has an egg to stuffing ratio of 5:1. Not coming back. Coffee was good.
====Review 8 ====
Rating: 1 star rating
We went here for breakfast and we're not really impressed. They got one of our orders wrong, completely forgot another and had zero apologies for it all. Food is really mediocre for the price. All the egg dishes were well prepared but everything else on the plate was unappetizing. The biscuits were dry, the ham was leathery, the bread was stale. Would not recommend at all.
====Review 9 ====
Rating: 1 star rating
1 check-inThis was easily the worst experience we had anywhere on our entire trip.Left waiting for 20 minutes to place our order while the table next to us, seated after we arrived, placed and received their order. We were generally ignored by the staff--and at the end we paid 10% extra for their "service."Do yourself a favor and go elsewhere.Useful
====Review 10 ====
Rating: 1 star rating
1 check-inI had spinach egg for breakfast. It tasted absolutely bland, tasteless. The portion of the orange juice is way too little. Not worth the price .. It's not the breakfast to die as compared in states. Lastly, customer service was bad. Coffee is good tho.Useful
====Review 11 ====
--Cannot find ratings--
Copyright © 2004–2020 Yelp Inc. Yelp, , and related marks are registered trademarks of Yelp.
for review in reviewDict:
f.write("\n" + review[0] + "," + review[1] )
f.close()_____no_output_____
</code>
| {
"repository": "nicholasneo78/NLP-assignment",
"path": "Assignment 1/SourceCode/Web Scraping Yelp.ipynb",
"matched_keywords": [
"STAR",
"Salmon"
],
"stars": null,
"size": 11333,
"hexsha": "d089d17c834f72b1f0e7ccfef9a5684da1e4288e",
"max_line_length": 7621,
"avg_line_length": 73.1161290323,
"alphanum_fraction": 0.6837554046
} |
# Notebook from ofou/course-content
Path: tutorials/W0D4_Calculus/W0D4_Tutorial2.ipynb
<a href="https://colab.research.google.com/github/NeuromatchAcademy/course-content/blob/master/tutorials/W0D4_Calculus/W0D4_Tutorial2.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>[](https://kaggle.com/kernels/welcome?src=https://raw.githubusercontent.com/NeuromatchAcademy/course-content/master/tutorials/W0D4_Calculus/W0D4_Tutorial2.ipynb)_____no_output_____# Tutorial 2: Differential Equations
**Week 0, Day 4: Calculus**
**By Neuromatch Academy**
__Content creators:__ John S Butler, Arvind Kumar with help from Rebecca Brady
__Content reviewers:__ Swapnil Kumar, Sirisha Sripada, Matthew McCann, Tessy Tom
__Production editors:__ Matthew McCann, Ella Batty_____no_output_____**Our 2021 Sponsors, including Presenting Sponsor Facebook Reality Labs**
<p align='center'><img src='https://github.com/NeuromatchAcademy/widgets/blob/master/sponsors.png?raw=True'/></p>_____no_output_____---
# Tutorial Objectives
*Estimated timing of tutorial: 45 minutes*
A great deal of neuroscience can be modelled using differential equations, from the gating of ion channels to single neurons, networks of neurons, blood flow, and behaviour. A simple way to think about differential equations is that they are equations describing how something changes.
The most famous of these in neuroscience is the Nobel Prize-winning Hodgkin-Huxley equation, which describes a neuron by modelling the gating of ion channels along its axon. But we will not start there; we will start a few steps back.
Differential equations are mathematical equations that describe how something, like a population or a neuron, changes over time. The reason differential equations are so useful is that they generalise a process, so that one equation can be used to describe many different outcomes.
The general form of a first order differential equation is:
\begin{align*}
\frac{d}{dt}y(t)&=f(t,y(t))\\
\end{align*}
which can be read as "the change in a process $y$ over time $t$ is a function $f$ of time $t$ and of $y$ itself". This might initially seem like a paradox, as you are using the process $y$ you want to know about to describe itself, a bit like the MC Escher drawing of two hands painting [each other](https://en.wikipedia.org/wiki/Drawing_Hands). But that is the beauty of mathematics: this can be solved exactly some of the time, and when it cannot be solved exactly we can use numerical methods to estimate the answer (as we will see in the next tutorial).
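As a quick preview of that numerical idea, the sketch below (an added illustration, not part of the tutorial's own code) applies the forward Euler rule $y(t+\Delta t) \approx y(t) + \Delta t \, f(t, y(t))$ to an arbitrary growth rule; the choices of `f`, the step size `dt`, and the initial condition are assumptions made only for this example:

```python
import numpy as np

def f(t, y):
    return 0.3 * y             # illustrative right-hand side: dy/dt = 0.3 * y

dt = 0.01                      # step size (assumed)
t = np.arange(0, 10, dt)       # time points
y = np.zeros_like(t)
y[0] = 1.0                     # initial condition (assumed)

for i in range(1, len(t)):
    # forward Euler step: y(t + dt) ~ y(t) + dt * f(t, y(t))
    y[i] = y[i - 1] + dt * f(t[i - 1], y[i - 1])

print(y[-1], np.exp(0.3 * t[-1]))  # Euler estimate vs. exact solution e^{0.3 t}
```

With a small enough step size the estimate stays close to the exact exponential solution; the next tutorial develops numerical integration properly.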
In this tutorial, we will see how __differential equations are motivated by observations of physical responses.__ We will break down the population differential equation and then the integrate and fire model, which leads nicely into raster plots, frequency-current curves, and rate models.
**Steps:**
- Get an intuitive understanding of a linear population differential equation (humans, not neurons)
- Visualize the relationship between the change in population and the population
- Breakdown the Leaky Integrate and Fire (LIF) differential equation
- Code the exact solution of an LIF for a constant input
- Visualize and listen to the response of the LIF for different inputs
_____no_output_____
<code>
# @title Video 1: Why do we care about differential equations?
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = 'https://player.bilibili.com/player.html?bvid={0}&page={1}'.format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id="BV1v64y197bW", width=854, height=480, fs=1)
print('Video available at https://www.bilibili.com/video/{0}'.format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="LhX-mUd8lPo", width=854, height=480, fs=1, rel=0)
print('Video available at https://youtube.com/watch?v=' + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
display(out)_____no_output_____
</code>
---
# Setup_____no_output_____
<code>
# Imports
import numpy as np
import matplotlib.pyplot as plt_____no_output_____# @title Figure Settings
import IPython.display as ipd
from matplotlib import gridspec
import ipywidgets as widgets # interactive display
%config InlineBackend.figure_format = 'retina'
# use NMA plot style
plt.style.use("https://raw.githubusercontent.com/NeuromatchAcademy/course-content/master/nma.mplstyle")
my_layout = widgets.Layout()_____no_output_____# @title Plotting Functions
def plot_dPdt(alpha=.3):
""" Plots change in population over time
Args:
alpha: Birth Rate
Returns:
    A two-panel figure
      left panel: change in population as a function of population
      right panel: population as a function of time
"""
with plt.xkcd():
time=np.arange(0, 10 ,0.01)
fig = plt.figure(figsize=(12,4))
gs = gridspec.GridSpec(1, 2)
    ## dp/dt as a function of p
plt.subplot(gs[0])
plt.plot(np.exp(alpha*time), alpha*np.exp(alpha*time))
plt.xlabel(r'Population $p(t)$ (millions)')
plt.ylabel(r'$\frac{d}{dt}p(t)=\alpha p(t)$')
## p exact solution
plt.subplot(gs[1])
plt.plot(time, np.exp(alpha*time))
plt.ylabel(r'Population $p(t)$ (millions)')
plt.xlabel('time (years)')
plt.show()
def plot_V_no_input(V_reset=-75):
"""
Args:
V_reset: Reset Potential
Returns:
    A two-panel figure
left panel: change in membrane potential as a function of membrane potential
right panel: membrane potential as a function of time
"""
E_L=-75
tau_m=10
t=np.arange(0,100,0.01)
V= E_L+(V_reset-E_L)*np.exp(-(t)/tau_m)
V_range=np.arange(-90,0,1)
dVdt=-(V_range-E_L)/tau_m
with plt.xkcd():
time=np.arange(0, 10, 0.01)
fig = plt.figure(figsize=(12, 4))
gs = gridspec.GridSpec(1, 2)
plt.subplot(gs[0])
plt.plot(V_range,dVdt)
plt.hlines(0,min(V_range),max(V_range), colors='black', linestyles='dashed')
plt.vlines(-75, min(dVdt), max(dVdt), colors='black', linestyles='dashed')
plt.plot(V_reset,-(V_reset - E_L)/tau_m, 'o', label=r'$V_{reset}$')
plt.text(-50, 1, 'Positive')
plt.text(-50, -2, 'Negative')
plt.text(E_L - 1, max(dVdt), r'$E_L$')
plt.legend()
plt.xlabel('Membrane Potential V (mV)')
plt.ylabel(r'$\frac{dV}{dt}=\frac{-(V(t)-E_L)}{\tau_m}$')
plt.subplot(gs[1])
plt.plot(t,V)
plt.plot(t[0],V_reset,'o')
plt.ylabel(r'Membrane Potential $V(t)$ (mV)')
plt.xlabel('time (ms)')
plt.ylim([-95, -60])
plt.show()
## LIF PLOT
def plot_IF(t, V,I,Spike_time):
"""
Args:
t : time
V : membrane Voltage
I : Input
Spike_time : Spike_times
Returns:
figure with three panels
top panel: Input as a function of time
middle panel: membrane potential as a function of time
bottom panel: Raster plot
"""
with plt.xkcd():
fig = plt.figure(figsize=(12, 4))
gs = gridspec.GridSpec(3, 1, height_ratios=[1, 4, 1])
# PLOT OF INPUT
plt.subplot(gs[0])
plt.ylabel(r'$I_e(nA)$')
plt.yticks(rotation=45)
plt.hlines(I,min(t),max(t),'g')
plt.ylim((2, 4))
plt.xlim((-50, 1000))
# PLOT OF ACTIVITY
plt.subplot(gs[1])
plt.plot(t,V)
plt.xlim((-50, 1000))
plt.ylabel(r'$V(t)$(mV)')
# PLOT OF SPIKES
plt.subplot(gs[2])
plt.ylabel(r'Spike')
plt.yticks([])
plt.scatter(Spike_time, 1 * np.ones(len(Spike_time)), color="grey", marker=".")
plt.xlim((-50, 1000))
plt.xlabel('time(ms)')
plt.show()
## Plotting the differential Equation
def plot_dVdt(I=0):
"""
Args:
I : Input Current
Returns:
figure of change in membrane potential as a function of membrane potential
"""
with plt.xkcd():
E_L = -75
tau_m = 10
V = np.arange(-85, 0, 1)
g_L = 10.
fig = plt.figure(figsize=(6, 4))
plt.plot(V,(-(V-E_L) + I*10) / tau_m)
plt.hlines(0, min(V), max(V), colors='black', linestyles='dashed')
plt.xlabel('V (mV)')
plt.ylabel(r'$\frac{dV}{dt}$')
plt.show()_____no_output_____# @title Helper Functions
## EXACT SOLUTION OF LIF
def Exact_Integrate_and_Fire(I,t):
"""
Args:
I : Input Current
t : time
Returns:
Spike : Spike Count
Spike_time : Spike time
V_exact : Exact membrane potential
"""
Spike = 0
tau_m = 10
R = 10
t_isi = 0
V_reset = E_L = -75
V_exact = V_reset * np.ones(len(t))
V_th = -50
Spike_time = []
for i in range(0, len(t)):
V_exact[i] = E_L + R*I + (V_reset - E_L - R*I) * np.exp(-(t[i]-t_isi)/tau_m)
# Threshold Reset
if V_exact[i] > V_th:
V_exact[i-1] = 0
V_exact[i] = V_reset
t_isi = t[i]
Spike = Spike+1
Spike_time = np.append(Spike_time, t[i])
return Spike, Spike_time, V_exact_____no_output_____
</code>
---
# Section 1: Population differential equation_____no_output_____
<code>
# @title Video 2: Population differential equation
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = 'https://player.bilibili.com/player.html?bvid={0}&page={1}'.format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id="BV1pg41137CU", width=854, height=480, fs=1)
print('Video available at https://www.bilibili.com/video/{0}'.format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="czgGyoUsRoQ", width=854, height=480, fs=1, rel=0)
print('Video available at https://youtube.com/watch?v=' + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
display(out)_____no_output_____
</code>
This video covers our first example of a differential equation: a differential equation which models the change in population.
<details>
<summary> <font color='blue'>Click here for text recap of video </font></summary>
To get an intuitive feel for differential equations, we will start with a population differential equation, which models the change in population [1] (that is, human population, not neurons; we will get to neurons later). Mathematically it is written as:
\begin{align*}
\\
\frac{d}{dt}\,p(t) &= \alpha p(t),\\
\end{align*}
where $p(t)$ is the population of the world and $\alpha$ is a parameter representing birth rate.
Another way of thinking about the models is that the equation
\begin{align*}
\\
\frac{d}{dt}\,p(t) &= \alpha p(t),\\
\text{can be written as:}\\
\text{"Change in Population"} &= \text{ "Birth rate times Current population."}
\end{align*}
The equation is saying something reasonable: maybe not the perfect model, but a good start.
</details>_____no_output_____### Think! 1.1: Interpreting the behavior of a linear population equation
Using the plot below of change of population $\frac{d}{dt} p(t) $ as a function of population $p(t)$ with birth-rate $\alpha=0.3$, discuss the following questions:
1. Why is the population differential equation known as a linear differential equation?
2. How does population size affect the rate of change of the population?
_____no_output_____
<code>
# @markdown Execute the code to plot the rate of change of population as a function of population
p = np.arange(0, 100, 0.1)
with plt.xkcd():
dpdt = 0.3*p
fig = plt.figure(figsize=(6, 4))
plt.plot(p, dpdt)
plt.xlabel(r'Population $p(t)$ (millions)')
plt.ylabel(r'$\frac{d}{dt}p(t)=\alpha p(t)$')
plt.show()_____no_output_____# to_remove explanation
"""
1. The plot of $\frac{dp}{dt}$ is a line, which is why the differential
equation is known as a linear differential equation.
2. As the population increases, the change of population increases. A
population of 20 has a change of 6 while a population of 100 has a change of
30. This makes sense - the larger the population the larger the change.
"""_____no_output_____
</code>
## Section 1.1: Exact solution of the population equation_____no_output_____### Section 1.1.1: Initial condition
The linear population differential equation is known as an initial value differential equation because we need an initial population value to solve it. Here we will set our initial population at time 0 to 1:
\begin{align*}
&p(0)=1.\\
\end{align*}
Different initial conditions will lead to different answers, but they will not change the differential equation. This is one of the strengths of a differential equation. _____no_output_____### Section 1.1.2: Exact Solution
To calculate the exact solution of a differential equation, we must integrate both sides. Instead of numerical integration (as you delved into in the last tutorial), we will first try to solve the differential equations using analytical integration. As with derivatives, we can find analytical integrals of simple equations by consulting [a list](https://en.wikipedia.org/wiki/Lists_of_integrals). We can then get integrals for more complex equations using some mathematical tricks - the harder the equation the more obscure the trick.
The linear population equation
\begin{align*}
\frac{d}{dt}\,p(t) &= \alpha p(t),\\\\
p(0)=P_0,\\
\end{align*}
has the exact solution:
\begin{align*}
p(t)&=P_0e^{\alpha t}.\\
\end{align*}
The exact solution written in words is:
\begin{align*}
\text{"Population"}&=\text{"grows/declines exponentially as a function of time and birth rate"}.\\
\end{align*}
Most differential equations do not have a known exact solution, so in the next tutorial on numerical methods we will show how the solution can be estimated.
A small aside: a good deal of progress in mathematics was due to mathematicians writing taunting letters to each other saying they had a trick that could solve something better than everyone else. So do not worry too much about the tricks._____no_output_____#### Example Exact Solution of the Population Equation
Let's consider the population differential equation with a birth rate $\alpha=0.3$:
\begin{align*}
\frac{d}{dt}\,p(t) = 0.3 p(t),\\
\text{with the initial condition}\\
p(0)=1.\\
\end{align*}
It has an exact solution
\begin{align*}
\\
p(t)=e^{0.3 t}.
\end{align*}
_____no_output_____
<code>
# @markdown Execute code to plot the exact solution
t = np.arange(0, 10, 0.1) # Time from 0 to 10 years in 0.1 steps
with plt.xkcd():
p = np.exp(0.3 * t)
fig = plt.figure(figsize=(6, 4))
plt.plot(t, p)
plt.ylabel('Population (millions)')
plt.xlabel('time (years)')
plt.show()_____no_output_____
</code>
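If you want to convince yourself that $p(t)=e^{0.3 t}$ really does solve the differential equation, a quick sanity check (not part of the original exercises) is to approximate $\frac{d}{dt}p(t)$ numerically with finite differences and compare it to $0.3\,p(t)$:_____no_output_____
<code>
# Sanity check: does p(t) = exp(0.3 t) satisfy dp/dt = 0.3 p(t)?
t = np.arange(0, 10, 0.1)
p = np.exp(0.3 * t)

# Finite-difference approximation of dp/dt
dpdt_numeric = np.gradient(p, t)

# Right-hand side of the differential equation
dpdt_equation = 0.3 * p

# The maximum absolute mismatch should be small (and shrinks with the step size)
print(np.max(np.abs(dpdt_numeric - dpdt_equation)))_____no_output_____
</code>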
## Section 1.2: Parameters of the differential equation
*Estimated timing to here from start of tutorial: 12 min*
One of the goals when designing a differential equation is to make it generalisable, which means that the differential equation will give reasonable solutions for different countries with different birth rates $\alpha$.
_____no_output_____### Interactive Demo 1.2: Interactive Parameter Change
Play with the widget to see the relationship between $\alpha$ and the population differential equation as a function of population (left-hand side), and the population solution as a function of time (right-hand side). Pay close attention to the transition point from positive to negative.
How do changing parameters of the population equation affect the outcome?
1. What happens when $\alpha < 0$?
2. What happens when $\alpha > 0$?
3. What happens when $\alpha = 0$?_____no_output_____
<code>
# @markdown Make sure you execute this cell to enable the widget!
my_layout.width = '450px'
@widgets.interact(
alpha=widgets.FloatSlider(.3, min=-1., max=1., step=.1, layout=my_layout)
)
def Pop_widget(alpha):
plot_dPdt(alpha=alpha)
plt.show()_____no_output_____# to_remove explanation
"""
1. Negative values of alpha result in an exponential decrease to 0, a stable solution.
2. Positive values of alpha result in an exponential increase to infinity.
3. Alpha equal to 0 is a unique point known as an equilibrium point, where
dp/dt=0 and there is no change in population. This is known as a stable point.
"""_____no_output_____
</code>
The population differential equation is an over-simplification and has some very obvious limitations:
1. Population growth is not exponential, as resources are limited, so the population will level out at some point.
2. It does not include any external factors affecting the population, like weather, predators, and prey.
These kinds of limitations can be addressed by extending the model.
While it might not seem that the population equation has direct relevance to neuroscience, a similar equation is used to describe the accumulation of evidence for decision making. This is known as the Drift Diffusion Model, and you will see it in more detail in the Linear Systems day in Neuromatch (W2D2).
Another differential equation that is similar to the population equation is the Leaky Integrate and Fire model, which you may have seen in the python pre-course materials on W0D1 and W0D2. It will turn up later in Neuromatch as well. Below we will delve into the motivation of the differential equation._____no_output_____---
# Section 2: The leaky integrate and fire model
_____no_output_____
<code>
# @title Video 3: The leaky integrate and fire model
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = 'https://player.bilibili.com/player.html?bvid={0}&page={1}'.format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id="BV1rb4y1C79n", width=854, height=480, fs=1)
print('Video available at https://www.bilibili.com/video/{0}'.format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="ZfWO6MLCa1s", width=854, height=480, fs=1, rel=0)
print('Video available at https://youtube.com/watch?v=' + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
display(out)_____no_output_____
</code>
This video covers the Leaky Integrate and Fire model (a linear differential equation which describes the membrane potential of a single neuron).
<details>
<summary> <font color='blue'>Click here for text recap of full LIF equation from video </font></summary>
The Leaky Integrate and Fire Model is a linear differential equation, proposed by Louis Édouard Lapicque in 1907 [2], that describes the membrane potential ($V$) of a single neuron.
The subthreshold membrane potential dynamics of a LIF neuron are described by
\begin{align}
\tau_m\frac{dV}{dt} = -(V-E_L) + R_mI\,
\end{align}
where $\tau_m$ is the time constant, $V$ is the membrane potential, $E_L$ is the resting potential, $R_m$ is membrane resistance, and $I$ is the external input current.
</details>
In the next few sections, we will break down the full LIF equation and then build it back up to get an intuitive feel of the different facets of the differential equation.
_____no_output_____## Section 2.1: LIF without input
*Estimated timing to here from start of tutorial: 18 min*
As seen in the video, we will first model an LIF neuron without input, which results in the equation:
\begin{align}
\frac{dV}{dt} &= \frac{-(V-E_L)}{\tau_m}.\\
\end{align}
where $\tau_m$ is the time constant, $V$ is the membrane potential, and $E_L$ is the resting potential.
<details>
<summary> <font color='blue'>Click here for further details (from video) </font></summary>
Removing the input gives the equation
\begin{align}
\tau_m\frac{dV}{dt} &= -V+E_L,\\
\end{align}
which can be written in words as:
\begin{align}
\begin{matrix}\text{"Time constant multiplied by the} \\ \text{change in membrane potential"}\end{matrix}&=\begin{matrix}\text{"Minus Current} \\ \text{membrane potential"} \end{matrix}+
\begin{matrix}\text{"resting potential"}\end{matrix}.\\
\end{align}
The equation can be re-arranged to look even more like the population equation:
\begin{align}
\frac{dV}{dt} &= \frac{-(V-E_L)}{\tau_m}.\\
\end{align}
</details>
_____no_output_____### Think! 2.1: Effect of membrane potential $V$ on the LIF model
The plot below shows the change in membrane potential $\frac{dV}{dt}$ as a function of membrane potential $V$ with the parameters set as:
* `E_L = -75`
* `V_reset = -50`
* `tau_m = 10.`
1. What is the effect on $\frac{dV}{dt}$ when $V>-75$ mV?
2. What is the effect on $\frac{dV}{dt}$ when $V<-75$ mV?
3. What is the effect on $\frac{dV}{dt}$ when $V=-75$ mV?_____no_output_____
<code>
# @markdown Make sure you execute this cell to plot the relationship between dV/dt and V
# Parameter definition
E_L = -75
tau_m = 10
# Range of Values of V
V = np.arange(-90, 0, 1)
dV = -(V - E_L) / tau_m
with plt.xkcd():
fig = plt.figure(figsize=(6, 4))
plt.plot(V, dV)
plt.hlines(0, min(V), max(V), colors='black', linestyles='dashed')
plt.vlines(-75, min(dV), max(dV), colors='black', linestyles='dashed')
plt.text(-50, 1, 'Positive')
plt.text(-50, -2, 'Negative')
plt.text(E_L, max(dV) + 1, r'$E_L$')
plt.xlabel(r'$V(t)$ (mV)')
plt.ylabel(r'$\frac{dV}{dt}=\frac{-(V-E_L)}{\tau_m}$')
plt.ylim(-8, 2)
plt.show()_____no_output_____# to_remove explanation
"""
1. For $V>-75$ mV, the derivative is negative.
2. For $V<-75$ mV, the derivative is positive.
3. For $V=-75$ mV, the derivative is equal to $0$ and is a stable point where nothing changes.
"""_____no_output_____
</code>
### Section 2.1.1: Exact Solution of the LIF model without input
The LIF model has the exact solution:
\begin{align*}
V(t)=&\ E_L+(V_{reset}-E_L)e^{\frac{-t}{\tau_m}}\\
\end{align*}
where $\tau_m$ is the time constant, $V$ is the membrane potential, $E_L$ is the resting potential, and $V_{reset}$ is the initial membrane potential.
<details>
<summary> <font color='blue'>Click here for further details (from video) </font></summary>
Similar to the population equation, we need an initial membrane potential at time $0$ to solve the LIF model.
With this equation
\begin{align}
\frac{dV}{dt} &= \frac{-(V-E_L)}{\tau_m}\,\\
V(0)&=V_{reset},
\end{align}
where $V_{reset}$ is called the reset potential.
The LIF model has the exact solution:
\begin{align*}
V(t)=&\ E_L+(V_{reset}-E_L)e^{\frac{-t}{\tau_m}}\\
\text{ which can be written as: }\\
\begin{matrix}\text{"Current membrane} \\ \text{potential}"\end{matrix}=&\text{"Resting potential"}+\begin{matrix}\text{"Reset potential minus resting potential} \\ \text{times exponential with rate one over time constant."}\end{matrix}\\
\end{align*}
</details>_____no_output_____#### Interactive Demo 2.1.1: Initial Condition $V_{reset}$
This exercise is to get an intuitive feel for how different initial conditions $V_{reset}$ impact the differential equation of the LIF and the exact solution of the equation:
\begin{align}
\frac{dV}{dt} &= \frac{-(V-E_L)}{\tau_m}\,\\
\end{align}
with the parameters set as:
* `E_L = -75,`
* `tau_m = 10.`
The panel on the left-hand side plots the change in membrane potential $\frac{dV}{dt}$ as a function of membrane potential $V$, and the right-hand side panel plots the exact solution $V$ as a function of time $t$; the green dot in both panels is the reset potential $V_{reset}$.
Pay close attention to when $V_{reset}=E_L=-75$mV.
1. How does the solution look with initial values of $V_{reset} < -75$?
2. How does the solution look with initial values of $V_{reset} > -75$?
3. How does the solution look with initial values of $V_{reset} = -75$?
_____no_output_____
<code>
#@markdown Make sure you execute this cell to enable the widget!
my_layout.width = '450px'
@widgets.interact(
V_reset=widgets.FloatSlider(-77., min=-91., max=-61., step=2,
layout=my_layout)
)
def V_reset_widget(V_reset):
plot_V_no_input(V_reset)_____no_output_____# to_remove explanation
"""
1. Initial Values of $V_{reset} < -75$ result in the solution increasing to
-75mV because $\frac{dV}{dt} > 0$.
2. Initial Values of $V_{reset} > -75$ result in the solution decreasing to
-75mV because $\frac{dV}{dt} < 0$.
3. Initial Values of $V_{reset} = -75$ result in a constant $V = -75$ mV
because $\frac{dV}{dt} = 0$ (Stable point).
"""_____no_output_____
</code>
## Section 2.2: LIF with input
*Estimated timing to here from start of tutorial: 24 min*
We will re-introduce the input $I$ and membrane resistance $R_m$, giving the original equation:
\begin{align}
\tau_m\frac{dV}{dt} = -(V-E_L) + \color{blue}{R_mI}\,
\end{align}
The input can be other neurons or sensory information._____no_output_____### Interactive Demo 2.2: The Impact of Input
The interactive plot below manipulates $I$ in the differential equation.
- With increasing input, how does the $\frac{dV}{dt}$ change? How would this impact the solution? _____no_output_____
<code>
# @markdown Make sure you execute this cell to enable the widget!
my_layout.width = '450px'
@widgets.interact(
I=widgets.FloatSlider(3., min=0., max=20., step=2,
layout=my_layout)
)
def Pop_widget(I):
plot_dVdt(I=I)
plt.show()_____no_output_____# to_remove explanation
"""
dV/dt becomes bigger and less of it is below 0. This means the solution will increase well beyond what is biologically plausible
"""_____no_output_____
</code>
### Section 2.2.1: LIF exact solution
The LIF with a constant input has a known exact solution:
\begin{align*}
V(t)=&\ E_L+R_mI+(V_{reset}-E_L-R_mI)e^{\frac{-t}{\tau_m}}\\
\text{which is written as:}\\
\begin{matrix}\text{"Current membrane} \\ \text{potential"}\end{matrix}=&\begin{matrix}\text{"Resting potential} \\ \text{plus input term"}\end{matrix}+\begin{matrix}\text{"Reset potential minus resting potential minus input term} \\ \text{times exponential with rate one over time constant." }\end{matrix}\\
\end{align*}_____no_output_____The plot below shows the exact solution of the membrane potential with the parameters set as:
* `V_reset = -75,`
* `E_L = -75,`
* `tau_m = 10,`
* `R_m = 10,`
* `I = 10.`
Ask yourself, does the result make biological sense? If not, what would you change? We'll delve into this in the next section._____no_output_____
<code>
# @markdown Make sure you execute this cell to see the exact solution
dt = 0.5
t_rest = 0
t = np.arange(0, 1000, dt)
tau_m = 10
R_m = 10
V_reset = E_L = -75
I = 10
V = E_L + R_m*I + (V_reset - E_L - R_m*I) * np.exp(-(t)/tau_m)
with plt.xkcd():
fig = plt.figure(figsize=(6, 4))
plt.plot(t,V)
plt.ylabel('V (mV)')
plt.xlabel('time (ms)')
plt.show()_____no_output_____
</code>
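One way to see where the trace above levels off (an illustrative check, reusing the parameter values listed above): with no threshold, the solution settles at the steady-state value $E_L+R_mI$._____no_output_____
<code>
# Steady-state value of the LIF with constant input and no threshold
E_L = -75
R_m = 10
I = 10
V_steady = E_L + R_m * I
print(f"Steady-state membrane potential: {V_steady} mV")
# 25 mV is far above anything a real neuron sustains, which is why the
# next section adds a spiking threshold and a reset to the model._____no_output_____
</code>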
## Section 2.3: Maths is one thing, but neuroscience matters
*Estimated timing to here from start of tutorial: 30 min*_____no_output_____
<code>
# @title Video 4: Adding firing to the LIF
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = 'https://player.bilibili.com/player.html?bvid={0}&page={1}'.format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id="BV1gX4y1P7pZ", width=854, height=480, fs=1)
print('Video available at https://www.bilibili.com/video/{0}'.format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="rLQk-vXRaX0", width=854, height=480, fs=1, rel=0)
print('Video available at https://youtube.com/watch?v=' + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
display(out)_____no_output_____
</code>
This video first recaps the introduction of input to the leaky integrate and fire model and then delves into how we add spiking behavior (or firing) to the model.
<details>
<summary> <font color='blue'>Click here for text recap of video </font></summary>
While the exact solution is mathematically correct, it is not biologically valid, as a real neuron spikes and definitely does not plateau at a very positive value.
To model the firing of a spike, we must have a threshold voltage $V_{th}$ such that if the voltage $V(t)$ goes above it, the neuron spikes
$$V(t)>V_{th}.$$
We must record the time of spike $t_{isi}$ and count the number of spikes
$$t_{isi}=t, $$
$$Spike=Spike+1.$$
Then reset the membrane voltage $V(t)$
$$V(t_{isi} )=V_{Reset}.$$
To take into account the spike the exact solution becomes:
\begin{align*}
V(t)=&\ E_L+R_mI+(V_{reset}-E_L-R_mI)e^{\frac{-(t-t_{isi})}{\tau_m}},&\qquad V(t)<V_{th} \\
V(t)=&V_{reset},&\qquad V(t)>V_{th}\\
Spike=&Spike+1,&\\
t_{isi}=&t,\\
\end{align*}
While this does make the neuron spike, it introduces a discontinuity, which is not as elegant mathematically as it could be, but it gets results so that is good.
</details>_____no_output_____### Interactive Demo 2.3.1: Input on spikes
This exercise shows the relationship between the firing rate and the input for the exact solution `V` of the LIF:
$$
V(t)=\ E_L+R_mI+(V_{reset}-E_L-R_mI)e^{\frac{-(t-t_{isi})}{\tau_m}},
$$
with the parameters set as:
* `V_reset = -75,`
* `E_L = -75,`
* `tau_m = 10,`
* `R_m = 10.`
Below is a figure with three panels:
* the top panel is the input, $I,$
* the middle panel is the membrane potential $V(t)$. To illustrate the spike, $V(t)$ is set to $0$ and then reset to $-75$ mV when there is a spike.
* the bottom panel is the raster plot with each dot indicating a spike.
First, as electrophysiologists normally listen to spikes when conducting experiments, listen to the music of the firing rate for a single value of $I$. (Note: the audio doesn't work in some browsers, so don't worry if you can't hear anything.) _____no_output_____
<code>
# @markdown Make sure you execute this cell to be able to hear the neuron
I = 3
t = np.arange(0, 1000, dt)
Spike, Spike_time, V = Exact_Integrate_and_Fire(I, t)
plot_IF(t, V, I, Spike_time)
ipd.Audio(V, rate=len(V))_____no_output_____
</code>
Manipulate the input into the LIF to see the impact of input on the firing pattern (rate).
* What is the effect of $I$ on spiking?
* Is this biologically valid?_____no_output_____
<code>
# @markdown Make sure you execute this cell to enable the widget!
my_layout.width = '450px'
@widgets.interact(
I=widgets.FloatSlider(3, min=2.0, max=4., step=.1,
layout=my_layout)
)
def Pop_widget(I):
Spike, Spike_time, V = Exact_Integrate_and_Fire(I, t)
plot_IF(t, V, I, Spike_time)_____no_output_____# to_remove explanation
"""
1. As $I$ increases, the number of spikes increases.
2. No, as there is a limit to the number of spikes due to a refractory period, which is not accounted for in this model.
"""_____no_output_____
</code>
## Section 2.4: Firing Rate as a function of Input
*Estimated timing to here from start of tutorial: 38 min*
The firing frequency of a neuron plotted as a function of current is called an input-output curve (F–I curve). It is also known as a transfer function, which you came across in the previous tutorial. This function is one of the starting points for the rate model, which extends from modelling single neurons to the firing rate of a collection of neurons.
By fitting this to a function, we can start to generalise the firing pattern of many neurons, which can be used to build rate models. This will be discussed later in Neuromatch. _____no_output_____
<code>
# @markdown *Execute this cell to visualize the FI curve*
I_range = np.arange(2.0, 4.0, 0.1)
Spike_rate = np.ones(len(I_range))
for i, I in enumerate(I_range):
Spike_rate[i], _, _ = Exact_Integrate_and_Fire(I, t)
with plt.xkcd():
fig = plt.figure(figsize=(6, 4))
plt.plot(I_range,Spike_rate)
plt.xlabel('Input Current (nA)')
plt.ylabel('Spikes per Second (Hz)')
plt.show()_____no_output_____
</code>
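As a rough, optional illustration of the fitting mentioned above (not part of the original exercises), you could fit a low-order polynomial to the F-I curve computed in the previous cell; this reuses the `I_range` and `Spike_rate` arrays defined there._____no_output_____
<code>
# Rough illustration: fit a low-order polynomial to the simulated F-I curve
coeffs = np.polyfit(I_range, Spike_rate, deg=2)
fit = np.polyval(coeffs, I_range)

with plt.xkcd():
  fig = plt.figure(figsize=(6, 4))
  plt.plot(I_range, Spike_rate, label='simulated')
  plt.plot(I_range, fit, '--', label='polynomial fit')
  plt.xlabel('Input Current (nA)')
  plt.ylabel('Spikes per Second (Hz)')
  plt.legend()
  plt.show()_____no_output_____
</code>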
The LIF model is a very nice differential equation to start with in computational neuroscience as it has been used as a building block for many papers that simulate neuronal response.
__Strengths of LIF model:__
+ Has an exact solution;
+ Easy to interpret;
+ Great for building networks of neurons.
__Weaknesses of the LIF model:__
- Spiking is a discontinuity;
- Abstraction from biology;
- Cannot generate different spiking patterns.
_____no_output_____---
# Summary
*Estimated timing of tutorial: 45 min*_____no_output_____
<code>
# @title Video 5: Summary
from ipywidgets import widgets
out2 = widgets.Output()
with out2:
from IPython.display import IFrame
class BiliVideo(IFrame):
def __init__(self, id, page=1, width=400, height=300, **kwargs):
self.id=id
src = 'https://player.bilibili.com/player.html?bvid={0}&page={1}'.format(id, page)
super(BiliVideo, self).__init__(src, width, height, **kwargs)
video = BiliVideo(id="BV1jV411x7t9", width=854, height=480, fs=1)
print('Video available at https://www.bilibili.com/video/{0}'.format(video.id))
display(video)
out1 = widgets.Output()
with out1:
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="VzwLAW5p4ao", width=854, height=480, fs=1, rel=0)
print('Video available at https://youtube.com/watch?v=' + video.id)
display(video)
out = widgets.Tab([out1, out2])
out.set_title(0, 'Youtube')
out.set_title(1, 'Bilibili')
display(out)_____no_output_____
</code>
In this tutorial, we have seen two differential equations: the population differential equation and the leaky integrate and fire model.
We learned about:
* The motivation for differential equations.
* An intuitive relationship between the solution and the form of the differential equation.
* How different parameters of the differential equation impact the solution.
* The strengths and limitations of the simple differential equations.
_____no_output_____---
# Links to Neuromatch Days
Differential equations turn up in a number of different Neuromatch days:
* The LIF model is discussed in more detail in Model Types (Week 1 Day 1) and Real Neurons (Week 2 Day 3).
* The Drift Diffusion model, which is a differential equation for decision making, is discussed in Linear Systems (Week 2 Day 2).
* Systems of differential equations are discussed in Linear Systems (Week 2 Day 2) and Dynamic Networks (Week 2 Day 4).
---
# References
1. Lotka, A. J. (1920). Analytical note on certain rhythmic relations in organic systems. Proceedings of the National Academy of Sciences, 6(7):410–415.
2. Brunel N, van Rossum MC. Lapicque's 1907 paper: from frogs to integrate-and-fire. Biol Cybern. 2007 Dec;97(5-6):337-9. doi: 10.1007/s00422-007-0190-0. Epub 2007 Oct 30. PMID: 17968583.
# Bibliography
1. Dayan, P., & Abbott, L. F. (2001). Theoretical neuroscience: computational and mathematical modeling of neural systems. Computational Neuroscience Series.
2. Strogatz, S. (2014). Nonlinear dynamics and chaos: with applications to physics, biology, chemistry, and engineering (Studies in Nonlinearity), 2nd edition. Westview Press.
## Supplemental Popular Reading List
1. Lindsay, G. (2021). Models of the Mind: How Physics, Engineering and Mathematics Have Shaped Our Understanding of the Brain. Bloomsbury Publishing.
2. Strogatz, S. (2004). Sync: The emerging science of spontaneous order. Penguin UK.
## Popular Podcast
1. Strogatz, S. (Host). (2020-), Joy of X https://www.quantamagazine.org/tag/the-joy-of-x/ Quanta Magazine
_____no_output_____
| {
"repository": "ofou/course-content",
"path": "tutorials/W0D4_Calculus/W0D4_Tutorial2.ipynb",
"matched_keywords": [
"biology",
"neuroscience"
],
"stars": null,
"size": 57242,
"hexsha": "d08ae990db4ab0074c5f23c150e6aaa4bbe88b89",
"max_line_length": 552,
"avg_line_length": 36.1148264984,
"alphanum_fraction": 0.5792774536
} |
# Notebook from SuryaReginaAA/Learn
Path: NLP/8.Using Named Entity Recognition (NER).ipynb
# Using Named Entity Recognition (NER)_____no_output_____**Named entities** are noun phrases that refer to specific locations, people, organizations, and so on. With **named entity recognition**, you can find the named entities in your texts and also determine what kind of named entity they are.
Here’s the list of named entity types from the <a href = "https://www.nltk.org/book/ch07.html#sec-ner">NLTK book</a>:_____no_output_____<table>
<tr><th>NEtype</th> <th>Examples</th></tr>
<tr><td>ORGANIZATION</td> <td>Georgia-Pacific Corp., WHO</td></tr>
<tr><td>PERSON</td> <td>Eddy Bonte, President Obama</td></tr>
<tr><td>LOCATION</td> <td>Murray River, Mount Everest</td></tr>
<tr><td>DATE</td> <td>June, 2008-06-29</td></tr>
<tr><td>TIME</td> <td>two fifty a m, 1:30 p.m.</td></tr>
<tr><td>MONEY</td> <td>175 million Canadian dollars, GBP 10.40</td></tr>
<tr><td>PERCENT</td> <td>twenty pct, 18.75 %</td></tr>
<tr><td>FACILITY</td> <td>Washington Monument, Stonehenge</td></tr>
<tr><td>GPE</td> <td>South East Asia, Midlothian</td></tr>
<table>
You can use nltk.ne_chunk() to recognize named entities. Let’s use lotr_pos_tags again to test it out:_____no_output_____
<code>
import nltk
from nltk.tokenize import word_tokenize_____no_output_____lotr_quote = "It's a dangerous business, Frodo, going out your door."_____no_output_____words_in_lotr_quote = word_tokenize(lotr_quote)
print(words_in_lotr_quote)['It', "'s", 'a', 'dangerous', 'business', ',', 'Frodo', ',', 'going', 'out', 'your', 'door', '.']
lotr_pos_tags = nltk.pos_tag(words_in_lotr_quote)
print(lotr_pos_tags)[('It', 'PRP'), ("'s", 'VBZ'), ('a', 'DT'), ('dangerous', 'JJ'), ('business', 'NN'), (',', ','), ('Frodo', 'NNP'), (',', ','), ('going', 'VBG'), ('out', 'RP'), ('your', 'PRP$'), ('door', 'NN'), ('.', '.')]
tree = nltk.ne_chunk(lotr_pos_tags)_____no_output_____
</code>
Now take a look at the visual representation:_____no_output_____
<code>
tree.draw()_____no_output_____
</code>
Here’s what you get:
_____no_output_____See how Frodo has been tagged as a PERSON? You also have the option to use the parameter binary=True if you just want to know what the named entities are but not what kind of named entity they are:_____no_output_____
<code>
tree = nltk.ne_chunk(lotr_pos_tags, binary=True)
tree.draw()_____no_output_____
</code>
Now all you see is that Frodo is an NE:_____no_output_____That’s how you can identify named entities! But you can take this one step further and extract named entities directly from your text. Create a string from which to extract named entities. You can use this quote from <a href = "https://en.wikipedia.org/wiki/The_War_of_the_Worlds" >The War of the Worlds</a>:_____no_output_____
<code>
quote = """
Men like Schiaparelli watched the red planet—it is odd, by-the-bye, that
for countless centuries Mars has been the star of war—but failed to
interpret the fluctuating appearances of the markings they mapped so well.
All that time the Martians must have been getting ready.
During the opposition of 1894 a great light was seen on the illuminated
part of the disk, first at the Lick Observatory, then by Perrotin of Nice,
and then by other observers. English readers heard of it first in the
issue of Nature dated August 2."""_____no_output_____
</code>
Now create a function to extract named entities:_____no_output_____
<code>
def extract_ne(quote):
words = word_tokenize(quote, language='english')
tags = nltk.pos_tag(words)
tree = nltk.ne_chunk(tags, binary=True)
tree.draw()
return set(
" ".join(i[0] for i in t)
for t in tree
if hasattr(t, "label") and t.label() == "NE"
)_____no_output_____
</code>
With this function, you gather all named entities, with no repeats. In order to do that, you tokenize by word, apply part of speech tags to those words, and then extract named entities based on those tags. Because you included binary=True, the named entities you’ll get won’t be labeled more specifically. You’ll just know that they’re named entities.
Take a look at the information you extracted:_____no_output_____
<code>
extract_ne(quote)_____no_output_____
</code>
You missed the city of Nice, possibly because NLTK interpreted it as a regular English adjective, but you still got the following:
1. **An institution**: 'Lick Observatory'
2. **A planet**: 'Mars'
3. **A publication**: 'Nature'
4. **People**: 'Perrotin', 'Schiaparelli'_____no_output_____
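If you also want to know what *kind* of entity each phrase is, you can drop `binary=True` and read each subtree's label instead. Here's a small variation on the function above (the name `extract_typed_ne` is just for this illustration):_____no_output_____
<code>
def extract_typed_ne(quote):
    words = word_tokenize(quote, language='english')
    tags = nltk.pos_tag(words)
    # binary=False (the default) keeps entity types like PERSON, GPE, ORGANIZATION
    tree = nltk.ne_chunk(tags)
    return set(
        (t.label(), " ".join(token for token, pos in t))
        for t in tree
        if hasattr(t, "label")
    )

extract_typed_ne(quote)_____no_output_____
</code>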
| {
"repository": "SuryaReginaAA/Learn",
"path": "NLP/8.Using Named Entity Recognition (NER).ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 7581,
"hexsha": "d08b7a6e714923738dfb0847dd39352295877527",
"max_line_length": 360,
"avg_line_length": 27.6678832117,
"alphanum_fraction": 0.5532251682
} |
# Notebook from kern-lab/popGenMachineLearningExamples
Path: demographicModelSelectionExample.ipynb
# Using a random forest for demographic model selection
In Schrider and Kern (2017) we give a toy example of demographic model selection via supervised machine learning in Figure Box 1. Following a discussion on twitter, Vince Buffalo had the great idea of our providing a simple example of supervised ML in population genetics using a jupyter notebook; this notebook aims to serve that purpose by showing you exactly how we produced that figure in our paper_____no_output_____## Preliminaries
The road map here will be to 1) simulate data under three demographic models, 2) train a classifier to distinguish among those models, 3) test that classifier with new simulation data, and 4) graphically present how well our trained classifier works.
To do this we will use coalescent simulations as implemented in Dick Hudson's well known `ms` software and for the ML side of things we will use the `scikit-learn` package. Let's start by installing these dependencies (if you don't have them installed already)_____no_output_____### Install, and compile `ms`
We have put a copy of the `ms` tarball in this repo, so the following should work upon cloning_____no_output_____
<code>
#untar and compile ms and sample_stats
!tar zxf ms.tar.gz; cd msdir; gcc -o ms ms.c streec.c rand1.c -lm; gcc -o sample_stats sample_stats.c tajd.c -lm
#I get three compiler warnings from ms, but everything should be fine
#now I'll just move the programs into the current working dir
!mv msdir/ms . ; mv msdir/sample_stats .;_____no_output_____
</code>
### Install `scikit-learn`
If you use anaconda, you may already have these modules installed, but if not you can install with either of the following_____no_output_____
<code>
!conda install scikit-learn --yes_____no_output_____
</code>
or if you don't use `conda`, you can use `pip` to install scikit-learn with_____no_output_____
<code>
!pip install -U scikit-learn_____no_output_____
</code>
# Step 1: create a training set and a testing set
We will create a training set using simulations from three different demographic models: equilibrium population size, instantaneous population growth, and instantaneous population contraction. As you'll see this is really just a toy example because we will perform classification based on data from a single locus; in practice this would be ill-advised and you would want to use data from many loci simultaneously.
So let's do some simulation using `ms` and summarize those simulations using the `sample_stats` program that Hudson provides. Ultimately we will only use two summary stats for classification, but one could use many more. Each of these simulations should take a few seconds to run._____no_output_____
<code>
#simulate under the equilibrium model
!./ms 20 2000 -t 100 -r 100 10000 | ./sample_stats > equilibrium.msOut.stats_____no_output_____#simulate under the contraction model
!./ms 20 2000 -t 100 -r 100 10000 -en 0 1 0.5 -en 0.2 1 1 | ./sample_stats > contraction.msOut.stats_____no_output_____#simulate under the growth model
!./ms 20 2000 -t 100 -r 100 10000 -en 0.2 1 0.5 | ./sample_stats > growth.msOut.stats_____no_output_____#now lets suck up the data columns we want for each of these files, and create one big training set; we will use numpy for this
# note that we are only using two columns of the data- these correspond to segSites and Fay & Wu's H
import numpy as np
X1 = np.loadtxt("equilibrium.msOut.stats",usecols=(3,9))
X2 = np.loadtxt("contraction.msOut.stats",usecols=(3,9))
X3 = np.loadtxt("growth.msOut.stats",usecols=(3,9))
X = np.concatenate((X1,X2,X3))
#create associated 'labels' -- these will be the targets for training
y = [0]*len(X1) + [1]*len(X2) + [2]*len(X3)
Y = np.array(y)
_____no_output_____#the last step in this process will be to shuffle the data, and then split it into a training set and a testing set
#the testing set will NOT be used during training, and will allow us to check how well the classifier is doing
#scikit-learn has a very convenient function for doing this shuffle and split operation
#
# will will keep out 10% of the data for testing
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X,Y,test_size=0.1)_____no_output_____
</code>
# Step 2: train our classifier and visualize decision surface
Now that we have a training and testing set ready to go, we can move on to training our classifier. For this example we will use a random forest classifier (Breiman 2001). This is all implemented in `scikit-learn` and so the code is very brief. _____no_output_____
<code>
from sklearn.ensemble import RandomForestClassifier
rfClf = RandomForestClassifier(n_estimators=100,n_jobs=10)
clf = rfClf.fit(X_train, Y_train)
_____no_output_____
</code>
That's it! The classifier is trained. This Random Forest classifier used 100 decision trees in its ensemble, a pretty large number considering that we are only using two summary stats to represent our data. Nevertheless, it trains on the data very, very quickly.
Confession: the real reason we are using only two summary statistics right here is because it makes it really easy to visualize that classifier's decision surface: which regions of the feature space would be assigned to which class? Let's have a look!
(Note: I have increased the h argument for the call to `make_meshgrid` below, coarsening the contour plot in the interest of efficiency. Decreasing this will yield a smoother plot, but may take a while and use up a lot more memory. Adjust at your own risk!)_____no_output_____
<code>
from sklearn.preprocessing import normalize
#These two functions (taken from scikit-learn.org) plot the decision boundaries for a classifier.
def plot_contours(ax, clf, xx, yy, **params):
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
out = ax.contourf(xx, yy, Z, **params)
return out
def make_meshgrid(x, y, h=.05):
x_min, x_max = x.min() - 1, x.max() + 1
y_min, y_max = y.min() - 1, y.max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
return xx, yy
#Let's do the plotting
import matplotlib.pyplot as plt
fig,ax= plt.subplots(1,1)
X0, X1 = X[:, 0], X[:, 1]
xx, yy = make_meshgrid(X0, X1, h=0.2)
plot_contours(ax, clf, xx, yy, cmap=plt.cm.coolwarm, alpha=0.8)
# plotting only a subset of our data to keep things from getting too cluttered
ax.scatter(X_test[:200, 0], X_test[:200, 1], c=Y_test[:200], cmap=plt.cm.coolwarm, edgecolors='k')
ax.set_xlabel(r"$\theta_{w}$", fontsize=14)
ax.set_ylabel(r"Fay and Wu's $H$", fontsize=14)
ax.set_xticks(())
ax.set_yticks(())
ax.set_title("Classifier decision surface", fontsize=14)
plt.show()_____no_output_____
</code>
Above we can see which regions of our feature space are assigned to each class: dark blue shaded areas will be classified as Equilibrium, faint blue as Contraction, and red as Growth. Note the non-linear decision surface. Looks pretty cool! And also illustrates how this type of classifier might be useful for discriminating among classes that are difficult to linearly separate. Also plotted are a subset of our test examples, as dots colored according to their true class. Looks like we are doing pretty well but have a few misclassifications. Would be nice to quantify this somehow, which brings us to...
# Step 3: benchmark our classifier
The last step of the process is to use our trained classifier to predict which demographic models our test data are drawn from. Recall that the classifier hasn't seen these test data so this should be a fair test of how well the classifier will perform on any new data we throw at it in the future. We will visualize performance using a confusion matrix. _____no_output_____
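Before plotting the full confusion matrix, a one-number summary of performance is the overall accuracy on the held-out test set, which scikit-learn exposes through the classifier's `score()` method:_____no_output_____
<code>
# Overall classification accuracy on the held-out test set
print("Test accuracy: %.3f" % clf.score(X_test, Y_test))_____no_output_____
</code>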
<code>
#here's the confusion matrix function
def makeConfusionMatrixHeatmap(data, title, trueClassOrderLs, predictedClassOrderLs, ax):
data = np.array(data)
data = normalize(data, axis=1, norm='l1')
heatmap = ax.pcolor(data, cmap=plt.cm.Blues, vmin=0.0, vmax=1.0)
for i in range(len(predictedClassOrderLs)):
for j in reversed(range(len(trueClassOrderLs))):
val = 100*data[j, i]
if val > 50:
c = '0.9'
else:
c = 'black'
ax.text(i + 0.5, j + 0.5, '%.2f%%' % val, horizontalalignment='center', verticalalignment='center', color=c, fontsize=9)
cbar = plt.colorbar(heatmap, cmap=plt.cm.Blues, ax=ax)
cbar.set_label("Fraction of simulations assigned to class", rotation=270, labelpad=20, fontsize=11)
# put the major ticks at the middle of each cell
ax.set_xticks(np.arange(data.shape[1]) + 0.5, minor=False)
ax.set_yticks(np.arange(data.shape[0]) + 0.5, minor=False)
ax.axis('tight')
ax.set_title(title)
#labels
ax.set_xticklabels(predictedClassOrderLs, minor=False, fontsize=9, rotation=45)
ax.set_yticklabels(reversed(trueClassOrderLs), minor=False, fontsize=9)
ax.set_xlabel("Predicted class")
ax.set_ylabel("True class")
#now the actual work
#first get the predictions
preds=clf.predict(X_test)
counts=[[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]]
for i in range(len(Y_test)):
counts[Y_test[i]][preds[i]] += 1
counts.reverse()
classOrderLs=['equil','contraction','growth']
#now do the plotting
fig,ax= plt.subplots(1,1)
makeConfusionMatrixHeatmap(counts, "Confusion matrix", classOrderLs, classOrderLs, ax)
plt.show()_____no_output_____
</code>
Looks pretty good. But can we make it better? Well a simple way might be to increase the number of features (i.e. summary statistics) we use as input. Let's give that a whirl using all of the output from Hudson's `sample_stats`_____no_output_____
<code>
X1 = np.loadtxt("equilibrium.msOut.stats",usecols=(1,3,5,7,9))
X2 = np.loadtxt("contraction.msOut.stats",usecols=(1,3,5,7,9))
X3 = np.loadtxt("growth.msOut.stats",usecols=(1,3,5,7,9))
X = np.concatenate((X1,X2,X3))
#create associated 'labels' -- these will be the targets for training
y = [0]*len(X1) + [1]*len(X2) + [2]*len(X3)
Y = np.array(y)
X_train, X_test, Y_train, Y_test = train_test_split(X,Y,test_size=0.1)
rfClf = RandomForestClassifier(n_estimators=100,n_jobs=10)
clf = rfClf.fit(X_train, Y_train)
preds=clf.predict(X_test)
counts=[[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]]
for i in range(len(Y_test)):
counts[Y_test[i]][preds[i]] += 1
counts.reverse()
fig,ax= plt.subplots(1,1)
makeConfusionMatrixHeatmap(counts, "Confusion matrix", classOrderLs, classOrderLs, ax)
plt.show()_____no_output_____
</code>
Even better!
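One more thing worth a quick peek: random forests also estimate how much each feature contributed to classification via `feature_importances_`. The statistic names below are our guess at the column order of Hudson's `sample_stats` output (pi, segregating sites, Tajima's D, theta_H, Fay & Wu's H), so treat them as a rough guide._____no_output_____
<code>
# Which summary statistics does the forest lean on most?
stat_names = ["pi", "segsites", "Tajimas_D", "theta_H", "FayWu_H"]  # assumed column order
for name, importance in sorted(zip(stat_names, clf.feature_importances_), key=lambda x: -x[1]):
    print("%10s %.3f" % (name, importance))_____no_output_____
</code>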
Hopefully this simple example gives you the gist of how supervised ML can be used. In the future we will populate this GitHub repository with further examples that might be illustrative. _____no_output_____
| {
"repository": "kern-lab/popGenMachineLearningExamples",
"path": "demographicModelSelectionExample.ipynb",
"matched_keywords": [
"population genetics"
],
"stars": 23,
"size": 119909,
"hexsha": "d08ccd875b83a906b01d5539e0ad5ef604653c96",
"max_line_length": 55538,
"avg_line_length": 299.7725,
"alphanum_fraction": 0.9129256353
} |
# Notebook from wellcomecollection/data-science
Path: notebooks/archive_exploration/notebooks/01 - subject coocurrence.ipynb
# Archive data
The Wellcome archive sits in a collections management system called CALM, which follows a rough set of standards and guidelines for storing archival records called [ISAD(G)](https://en.wikipedia.org/wiki/ISAD(G)). The archive is comprised of _collections_, each of which has a hierarchical set of series, sections, subjects, items and pieces sitting underneath it.
In the following notebooks I'm going to explore it and try to make as much sense of it as I can programmatically.
Let's start by loading in a few useful packages and defining some nice utils._____no_output_____
<code>
%matplotlib inline
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style("white")
plt.rcParams["figure.figsize"] = (20, 20)
import pandas as pd
import numpy as np
import networkx as nx
from sklearn.cluster import AgglomerativeClustering
from umap import UMAP
from tqdm import tqdm_notebook as tqdm_____no_output_____def flatten(input_list):
return [item for sublist in input_list for item in sublist]
def cartesian(*arrays):
return np.array([x.reshape(-1) for x in np.meshgrid(*arrays)]).T
def clean(subject):
return subject.strip().lower().replace("<p>", "")_____no_output_____
</code>
let's load up our CALM data. The data has been exported in its entirety as a single `.json` where each line is a record.
You can download the data yourself using [this script](https://github.com/wellcometrust/platform/blob/master/misc/download_oai_harvest.py). Stick the `.json` in the neighbouring `/data` directory to run the rest of the notebook seamlessly._____no_output_____
<code>
df = pd.read_json("data/calm_records.json")_____no_output_____len(df)_____no_output_____df.astype(str).describe()_____no_output_____
</code>
### Exploring individual columns
At the moment I have no idea what kind of information CALM contains - lets look at the list of column names_____no_output_____
<code>
list(df)_____no_output_____
</code>
Here I'm looking through a sample of values in each column, choosing the columns to explore based on their headings, a bit of contextual info from colleagues and the `df.describe()` above. _____no_output_____
<code>
df["Subject"]_____no_output_____
</code>
### After much trial and error...
Subjects look like an interesting avenue to explore further. Where subjects have _actually_ been filled in and the entry is not `None`, a list of subjects is returned.
We can explore some of these subjects' subtleties by creating an adjacency matrix. We'll count the number of times each subject appears alongside every other subject and return a big $n \times n$ matrix, where $n$ is the total number of unique subjects.
We can use this adjacency matrix for all sorts of stuff, but we have to build it first. To start, lets get a uniqur list of all subjects. This involves unpacking each sub-list and flattening them out into one long list, before finding the unique elements. We'll also use the `clean` function defined above to get rid of any irregularities which might become annoying later on._____no_output_____
<code>
subjects = flatten(df["Subject"].dropna().tolist())
print(len(subjects))
subjects = list(set(map(clean, subjects)))
print(len(subjects))_____no_output_____
</code>
At this point it's often helpful to index our data, i.e., transform words into numbers. We'll create two dictionaries which map back and forth between the subjects and their corresponding indices:_____no_output_____
<code>
index_to_subject = {index: subject for index, subject in enumerate(subjects)}
subject_to_index = {subject: index for index, subject in enumerate(subjects)}_____no_output_____
</code>
Let's instantiate an empty numpy array which we'll then fill with our co-occurrence data. Each column and each row will represent a subject - each cell (the intersection of a column and row) will therefore represent the 'strength' of the interaction between those subjects. As we haven't seen any interactions yet, we'll set every array element to 0._____no_output_____
<code>
adjacency = np.zeros((len(subjects), len(subjects)), dtype=np.uint16)  # np.zeros (not np.empty) so every count starts from 0_____no_output_____
</code>
To populate the matrix, we want to find every possible combination of subject in each sub-list from our original column, ie if we had the subjects
`[Disease, Heart, Heart Diseases, Cardiology]`
we would want to return
`
[['Disease', 'Disease'],
['Heart', 'Disease'],
['Heart Diseases', 'Disease'],
['Cardiology', 'Disease'],
['Disease', 'Heart'],
['Heart', 'Heart'],
['Heart Diseases', 'Heart'],
['Cardiology', 'Heart'],
['Disease', 'Heart Diseases'],
['Heart', 'Heart Diseases'],
['Heart Diseases', 'Heart Diseases'],
['Cardiology', 'Heart Diseases'],
['Disease', 'Cardiology'],
['Heart', 'Cardiology'],
['Heart Diseases', 'Cardiology'],
['Cardiology', 'Cardiology']]
`
The `cartesian()` function which I've defined above will do that for us. We then find the appropriate intersection in the matrix and add another unit of 'strength' to it.
We'll do this for every row of subjects in the `['Subjects']` column._____no_output_____
<code>
for row_of_subjects in tqdm(df["Subject"].dropna()):
for subject_pair in cartesian(row_of_subjects, row_of_subjects):
subject_index_1 = subject_to_index[clean(subject_pair[0])]
subject_index_2 = subject_to_index[clean(subject_pair[1])]
adjacency[subject_index_1, subject_index_2] += 1_____no_output_____
</code>
We can do all sorts of fun stuff now - adjacency matrices are the foundation on which all of graph theory is built. However, because it's a bit more interesting, I'm going to start with some dimensionality reduction. We'll get to the graphy stuff later.
Using [UMAP](https://github.com/lmcinnes/umap), we can squash the $n \times n$ dimensional matrix down into a $n \times m$ dimensional one, where $m$ is some arbitrary integer. Setting $m$ to 2 will then allow us to plot each subject as a point on a two dimensional plane. UMAP will try to preserve the 'distances' between subjects - in this case, that means that related or topically similar subjects will end up clustered together, and different subjects will move apart._____no_output_____
<code>
embedding_2d = pd.DataFrame(UMAP(n_components=2).fit_transform(adjacency))_____no_output_____embedding_2d.plot.scatter(x=0, y=1);_____no_output_____
</code>
We can isolate the clusters we've found above using a number of different methods - `scikit-learn` provides easy access to some very powerful algorithms. Here I'll use a technique called _agglomerative clustering_, and make a guess that 15 is an appropriate number of clusters to look for._____no_output_____
<code>
n_clusters = 15
embedding_2d["labels"] = AgglomerativeClustering(n_clusters).fit_predict(
embedding_2d.values
)
embedding_2d.plot.scatter(x=0, y=1, c="labels", cmap="Paired");_____no_output_____
</code>
We can now use the `index_to_subject` mapping that we created earlier to examine which subjects have been grouped together into clusters_____no_output_____
<code>
for i in range(n_clusters):
print(str(i) + " " + "-" * 80 + "\n")
print(
np.sort(
[
index_to_subject[index]
for index in embedding_2d[embedding_2d["labels"] == i].index.values
]
)
)
print("\n")_____no_output_____
</code>
Interesting! Taking a look at some of the smaller clusters of subjects (for the sake of space and your willingness to read lists of 100s of subjects):
One seems to be quite distinctly involved with drugs and associated topics/treatments:
```
13 --------------------------------------------------------------------------------
['acquired immunodeficiency syndrome' 'alcohol' 'amphetamines'
'analgesics, opioid' 'campaign' 'cannabis' 'cocaine' 'counseling'
'counterculture' 'crime' 'drugs' 'education' 'hallucinogens' 'heroin'
'hypnotics and sedatives' 'information services' 'inhalant abuse'
'lysergic acid diethylamide' 'n-methyl-3,4-methylenedioxyamphetamine'
'opioid' 'policy' 'prescription drugs' 'rehabilitation' 'renabilitation'
'self-help']
```
others are linked to early/fundamental research on DNA and genetics:
```
9 --------------------------------------------------------------------------------
['bacteriophages' 'biotechnology' 'caenorhabditis elegans'
'chromosome mapping' 'cloning, organism' 'discoveries in science' 'dna'
'dna, recombinant' 'genetic code' 'genetic engineering'
'genetic research' 'genetic therapy' 'genome, human' 'genomics'
'magnetic resonance spectroscopy' 'meiosis' 'models, molecular'
'molecular biology' 'nobel prize' 'retroviridae' 'rna'
'sequence analysis' 'viruses']
```
and others about food
```
14 --------------------------------------------------------------------------------
['acids' 'advertising' 'ambergris' 'animals' 'beer' 'biscuits' 'brassica'
'bread' 'butter' 'cacao' 'cake' 'candy' 'carbohydrates' 'cattle'
'cereals' 'cheese' 'chemistry, agricultural' 'cider' 'colouring agents'
'condiments' 'cooking (deer)' 'cooking (poultry)' 'cooking (venison)'
'cucumis sativus' 'dairy products' 'daucus carota' 'desserts'
'dried fruit' 'ecology' 'economics' 'eggs' 'environmental health'
'european rabbit' 'fermentation' 'food additives' 'food and beverages'
'food preservation' 'food, genetically modified' 'fruit' 'fruit drinks'
'fungi' 'game and game-birds' 'grapes' 'hands' 'health attitudes'
'herbaria' 'honey' 'jam' 'legislation' 'lettuce' 'meat' 'meat products'
'nuts' 'oatmeal' 'olive' 'onions' 'peas' 'pickles' 'pies' 'poultry'
'preserves (jams)' 'puddings' 'rice' 'seafood' 'seeds' 'sheep'
'sociology' 'solanum tuberosum' 'spinacia oleracea' 'sweetening agents'
'swine' 'syrups' 'vegetables' 'vitis' 'whiskey' 'wild flowers' 'wine']
```
These are all noticeably different themes, and they appear to be nicely separated in the topic-space we've built._____no_output_____
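As a small taste of the 'graphy stuff' mentioned earlier (and to make use of the `networkx` import at the top of the notebook), here's a minimal sketch that turns the adjacency matrix into a graph and lists the most connected subjects. The co-occurrence threshold of 5 is arbitrary._____no_output_____
<code>
# Build a graph from the co-occurrence matrix, keeping only reasonably strong links
graph = nx.Graph()
threshold = 5
for i, j in zip(*np.where(adjacency >= threshold)):
    if i < j:  # upper triangle only, which also skips self-loops
        graph.add_edge(index_to_subject[i], index_to_subject[j], weight=int(adjacency[i, j]))

# The most connected subjects, by number of co-occurring neighbours
sorted(graph.degree, key=lambda pair: pair[1], reverse=True)[:10]_____no_output_____
</code>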
| {
"repository": "wellcomecollection/data-science",
"path": "notebooks/archive_exploration/notebooks/01 - subject coocurrence.ipynb",
"matched_keywords": [
"RNA",
"genomics",
"biology",
"ecology"
],
"stars": 5,
"size": 13843,
"hexsha": "d08e7ed2078c8d3e3291f300558e13c25ad077e4",
"max_line_length": 481,
"avg_line_length": 35.9558441558,
"alphanum_fraction": 0.5989308676
} |
# Notebook from catalyst-cooperative/electricity-demand-mapping
Path: notebooks/historical_planning_areas.ipynb
# Notebook Goal & Approach_____no_output_____## Goal
For each FERC 714 respondent that reports hourly demand as an electricity planning area, create a geometry representing the geographic area in which that electricity demand originated. Create a separate geometry for each year in which data is available._____no_output_____## Approach
* Use the `eia_code` found in the `respondent_id_ferc714` table to link FERC 714 respondents to their corresponding EIA utilities or balancing areas.
* Use the `balancing_authority_eia861` and `sales_eia861` tables to figure out which respondents correspond to what utility or utilities (if a BA), and which states of operation.
* Use the `service_territory_eia861` table to link those combinations of years, utilities, and states of operation to collections of counties.
* Given the FIPS codes of the counties associated with each utility or balancing area in a given year, use geospatial data from the US Census to compile an annual demand area geometry.
* Merge those geometries back in with the `respondent_id_ferc714` table, along with additional EIA balancing area and utility IDs / Codes on a per-year basis._____no_output_____# Imports & Config_____no_output_____
<code>
%load_ext autoreload
%autoreload 2_____no_output_____# Standard Libraries:
import dateutil
import logging
import pathlib
import pickle
import re
import sys
import zipfile
# 3rd Party Libraries:
import contextily as ctx
import geopandas
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import sqlalchemy as sa
# Local Packages:
import pudl_____no_output_____
</code>
## Configure Output Formatting_____no_output_____
<code>
sns.set()
%matplotlib inline_____no_output_____mpl.rcParams['figure.figsize'] = (20,8)
mpl.rcParams['figure.dpi'] = 150
pd.options.display.max_columns = 100
pd.options.display.max_rows = 100_____no_output_____
</code>
## Logging_____no_output_____
<code>
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler(stream=sys.stdout)
log_format = '%(asctime)s [%(levelname)8s] %(name)s:%(lineno)s %(message)s'
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
logger.handlers = [handler]_____no_output_____
</code>
## PUDL Setup_____no_output_____
<code>
pudl_settings = pudl.workspace.setup.get_defaults()
ferc1_engine = sa.create_engine(pudl_settings['ferc1_db'])
pudl_engine = sa.create_engine(pudl_settings['pudl_db'])
pudl_out = pudl.output.pudltabl.PudlTabl(pudl_engine)
pudl_settings_____no_output_____
</code>
# Parameters_____no_output_____
<code>
MAP_CRS = "EPSG:3857"
CALC_CRS = "ESRI:102003"_____no_output_____
</code>
# Function Definitions_____no_output_____## Dummy EIA 861 ETL_____no_output_____
<code>
def test_etl_eia(eia_inputs, pudl_settings):
"""
This is a dummy function that runs the first part of the EIA ETL
process -- everything up until the entity harvesting begins. For
use in this notebook only.
"""
eia860_tables = eia_inputs["eia860_tables"]
eia860_years = eia_inputs["eia860_years"]
eia861_tables = eia_inputs["eia861_tables"]
eia861_years = eia_inputs["eia861_years"]
eia923_tables = eia_inputs["eia923_tables"]
eia923_years = eia_inputs["eia923_years"]
# generate CSVs for the static EIA tables, return the list of tables
#static_tables = _load_static_tables_eia(datapkg_dir)
# Extract EIA forms 923, 860
eia860_raw_dfs = pudl.extract.eia860.Extractor().extract(eia860_years, testing=True)
eia861_raw_dfs = pudl.extract.eia861.Extractor().extract(eia861_years, testing=True)
eia923_raw_dfs = pudl.extract.eia923.Extractor().extract(eia923_years, testing=True)
# Transform EIA forms 860, 861, 923
eia860_transformed_dfs = pudl.transform.eia860.transform(eia860_raw_dfs, eia860_tables=eia860_tables)
eia861_transformed_dfs = pudl.transform.eia861.transform(eia861_raw_dfs, eia861_tables=eia861_tables)
eia923_transformed_dfs = pudl.transform.eia923.transform(eia923_raw_dfs, eia923_tables=eia923_tables)
# create an eia transformed dfs dictionary
eia_transformed_dfs = eia860_transformed_dfs.copy()
eia_transformed_dfs.update(eia861_transformed_dfs.copy())
eia_transformed_dfs.update(eia923_transformed_dfs.copy())
# convert types..
eia_transformed_dfs = pudl.helpers.convert_dfs_dict_dtypes(eia_transformed_dfs, 'eia')
return eia_transformed_dfs_____no_output_____
</code>
## Dummy EIA 861 Harvesting
* Used to separately test the EIA entity harvesting process with EIA 861
* Doesn't yet work b/c 861 is structured differently than 860/923._____no_output_____
<code>
def test_harvest_eia(eia_transformed_dfs, eia860_years, eia861_years, eia923_years):
entities_dfs, eia_transformed_dfs = pudl.transform.eia.transform(
eia_transformed_dfs,
eia860_years=eia860_years,
eia861_years=eia861_years,
eia923_years=eia923_years,
)
# convert types..
entities_dfs = pudl.helpers.convert_dfs_dict_dtypes(entities_dfs, 'eia')
# Compile transformed dfs for loading...
return entities_dfs, eia_transformed_dfs_____no_output_____
</code>
## Compare Annual Demand vs. Sales_____no_output_____
<code>
def annual_demand_vs_sales(dhpa_ferc714, sales_eia861, ba_eia861):
"""
Categorize EIA Codes in FERC 714 as BA or Utility IDs.
Most FERC 714 respondent IDs are associated with an `eia_code` which
refers to either a `balancing_authority_id_eia` or a `utility_id_eia`
but no indication is given as to which type of ID each one is. This
is further complicated by the fact that EIA uses the same numerical
ID to refer to the same entity in most but not all cases, when that
entity acts as both a utility and as a balancing authority.
In order to identify which type of ID each `eia_code` is, this
    function compares the annual demand reported in association with
each code in the FERC 714 hourly planning area time series, and in
the EIA 861 sales table -- using the ID both as a utility and as a
balancing authority ID. The correlation between the FERC 714 demand
and the EIA 861 sales should be much higher for one type of ID than
the other, indicating which type of ID is represented in the FERC
714 data.
Args:
dhpa_ferc714 (pandas.DataFrame): The FERC 714 hourly demand
time series.
sales_eia861 (pandas.DataFrame): The EIA 861 Sales table.
ba_eia861 (pandas.DataFrame): The EIA 861 Balancing Authority
table, which contains the mapping between EIA Balancing
Authority Codes (3-4 letters) and EIA Balancing Authority
IDs (integers). The codes are present in the Sales table,
but the IDs are what the eia_code refers to.
Returns:
pandas.DataFrame: A table containing FERC 714 respondent IDs,
EIA codes, and a column indicating whether that code was
found to be more consistent with Balancing Authority or
Utility electricity demand / sales.
"""
# Sum up FERC 714 demand by report_year and eia_code:
dhpa_ferc714_by_eia_code = (
dhpa_ferc714
.groupby(["eia_code", "report_year"])["demand_mwh"]
.sum()
.reset_index()
)
# Sum up the EIA 861 sales by Utility ID:
sales_eia861_by_util = (
sales_eia861.groupby(["utility_id_eia", "report_date"])["sales_mwh"]
.sum()
.reset_index()
.assign(report_year=lambda x: x.report_date.dt.year)
.drop("report_date", axis="columns")
.rename(columns={"sales_mwh": "sales_utility_mwh"})
)
# Need to translate the BA Code to BA ID for comparison w/ eia_code
ba_codes_and_ids = (
ba_eia861[["balancing_authority_code_eia", "balancing_authority_id_eia", "report_date"]]
.drop_duplicates()
.assign(report_year=lambda x: x.report_date.dt.year)
.drop("report_date", axis="columns")
.dropna()
)
# Sum up the EIA 861 sales by Balancing Authority Code:
sales_eia861_by_ba = (
sales_eia861
.groupby(["balancing_authority_code_eia", "report_date"], observed=True)["sales_mwh"]
.sum()
.reset_index()
.assign(report_year=lambda x: x.report_date.dt.year)
.drop("report_date", axis="columns")
.rename(columns={"sales_mwh": "sales_ba_mwh"})
.query("balancing_authority_code_eia!='UNK'")
.merge(ba_codes_and_ids)
)
# Combine the demand and sales data with all the IDs
demand_and_sales = (
dhpa_ferc714_by_eia_code
.merge(
sales_eia861_by_util,
left_on=["eia_code", "report_year"],
right_on=["utility_id_eia", "report_year"],
how="left"
)
.merge(
sales_eia861_by_ba,
left_on=["eia_code", "report_year"],
right_on=["balancing_authority_id_eia", "report_year"],
how="left"
)
.astype({
"eia_code": pd.Int64Dtype(),
"utility_id_eia": pd.Int64Dtype(),
"balancing_authority_id_eia": pd.Int64Dtype(),
})
.assign(
ba_ratio=lambda x: x.sales_ba_mwh / x.demand_mwh,
utility_ratio=lambda x: x.sales_utility_mwh / x.demand_mwh,
)
)
return demand_and_sales_____no_output_____
</code>
## EIA Code Categorization_____no_output_____
<code>
def categorize_eia_code(rids_ferc714, utils_eia860, ba_eia861):
"""
Categorize EIA Codes in FERC 714 as BA or Utility IDs.
Most FERC 714 respondent IDs are associated with an `eia_code` which
refers to either a `balancing_authority_id_eia` or a `utility_id_eia`
but no indication is given as to which type of ID each one is. This
is further complicated by the fact that EIA uses the same numerical
ID to refer to the same entity in most but not all cases, when that
entity acts as both a utility and as a balancing authority.
Given the nature of the FERC 714 hourly demand dataset, this function
assumes that if the `eia_code` appears in the EIA 861 Balancing
Authority table, that it should be labeled `balancing_authority`.
If the `eia_code` appears only in the EIA 860 Utility table, then
it is labeled `utility`. These labels are put in a new column named
`respondent_type`. If the planning area's `eia_code` does not appear in
    either of those tables, then `respondent_type` is set to NA.
Args:
rids_ferc714 (pandas.DataFrame): The FERC 714 `respondent_id` table.
utils_eia860 (pandas.DataFrame): The EIA 860 Utilities output table.
ba_eia861 (pandas.DataFrame): The EIA 861 Balancing Authority table.
Returns:
pandas.DataFrame: A table containing all of the columns present in
the FERC 714 `respondent_id` table, plus a new one named
`respondent_type` which can take on the values `balancing_authority`,
`utility`, or the special value pandas.NA.
"""
ba_ids = set(ba_eia861.balancing_authority_id_eia.dropna())
util_not_ba_ids = set(utils_eia860.utility_id_eia.dropna()).difference(ba_ids)
new_rids = rids_ferc714.copy()
new_rids["respondent_type"] = pd.NA
new_rids.loc[new_rids.eia_code.isin(ba_ids), "respondent_type"] = "balancing_authority"
new_rids.loc[new_rids.eia_code.isin(util_not_ba_ids), "respondent_type"] = "utility"
ba_rids = new_rids[new_rids.respondent_type=="balancing_authority"]
util_rids = new_rids[new_rids.respondent_type=="utility"]
na_rids = new_rids[new_rids.respondent_type.isnull()]
ba_rids = (
ba_rids.merge(
ba_eia861
.filter(like="balancing_")
.drop_duplicates(subset=["balancing_authority_id_eia", "balancing_authority_code_eia"]),
how="left", left_on="eia_code", right_on="balancing_authority_id_eia"
)
)
util_rids = (
util_rids.merge(
utils_eia860[["utility_id_eia", "utility_name_eia"]]
.drop_duplicates("utility_id_eia"),
how="left", left_on="eia_code", right_on="utility_id_eia"
)
)
new_rids = (
pd.concat([ba_rids, util_rids, na_rids])
.astype({
"respondent_type": pd.StringDtype(),
"balancing_authority_code_eia": pd.StringDtype(),
"balancing_authority_id_eia": pd.Int64Dtype(),
"balancing_authority_name_eia": pd.StringDtype(),
"utility_id_eia": pd.Int64Dtype(),
"utility_name_eia": pd.StringDtype(),
})
)
return new_rids_____no_output_____
</code>
## Georeference Balancing Authorities_____no_output_____
<code>
def georef_bas(ba_eia861, st_eia861, sales_eia861, census_gdf):
"""
Create a GeoDataFrame mapping BAs to Utils to county geometries by year.
This GDF includes the following columns:
balancing_authority_id_eia (ba_eia861)
balancing_authority_name_eia (ba_eia861)
balancing_authority_code_eia (ba_eia861)
utility_id_eia (sales_eia861)
utility_name_eia (sales_eia861)
county_id_fips (st_eia861)
county (st_eia861)
state_id_fips (st_eia861)
state (st_eia861)
geometry (census_gdf)
county_name_census (census_gdf)
It includes information both about which counties are associated with
utilities that are part of balancing authorities, and utilities that
    are not part of balancing authorities, so it should be possible to
use it to generate geometries for all of the respondents in FERC 714,
both BAs and Utils.
"""
# Make sure that there aren't any more BA IDs we can recover from later years:
ba_ids_missing_codes = (
ba_eia861.loc[ba_eia861.balancing_authority_code_eia.isnull(), "balancing_authority_id_eia"]
.drop_duplicates()
.dropna()
)
assert len(ba_eia861[
(ba_eia861.balancing_authority_id_eia.isin(ba_ids_missing_codes)) &
(ba_eia861.balancing_authority_code_eia.notnull())
]) == 0
# Which utilities were part of what balancing areas in 2010-2012?
early_ba_by_util = (
ba_eia861
.query("report_date <= '2012-12-31'")
.loc[:, [
"report_date",
"balancing_authority_id_eia",
"balancing_authority_code_eia",
"utility_id_eia",
"balancing_authority_name_eia",
]]
.drop_duplicates(subset=["report_date", "balancing_authority_id_eia", "utility_id_eia"])
)
# Create a dataframe that associates utilities and balancing authorities.
    # This information is directly available in the early_ba_by_util dataframe
# but has to be compiled for 2013 and later years based on the utility
# BA associations that show up in the Sales table
# Create an annual, normalized version of the BA table:
ba_normed = (
ba_eia861
.loc[:, [
"report_date",
"state",
"balancing_authority_code_eia",
"balancing_authority_id_eia",
"balancing_authority_name_eia",
]]
.drop_duplicates(subset=[
"report_date",
"state",
"balancing_authority_code_eia",
"balancing_authority_id_eia",
])
)
ba_by_util = (
pd.merge(
ba_normed,
sales_eia861
.loc[:, [
"report_date",
"state",
"utility_id_eia",
"balancing_authority_code_eia"
]].drop_duplicates()
)
.loc[:, [
"report_date",
"state",
"utility_id_eia",
"balancing_authority_id_eia"
]]
.append(early_ba_by_util[["report_date", "utility_id_eia", "balancing_authority_id_eia"]])
.drop_duplicates()
.merge(ba_normed)
.dropna(subset=["report_date", "utility_id_eia", "balancing_authority_id_eia"])
.sort_values(["report_date", "balancing_authority_id_eia", "utility_id_eia", "state"])
)
# Merge in county FIPS IDs for each county served by the utility from
# the service territory dataframe. We do an outer merge here so that we
# retain any utilities that are not part of a balancing authority. This
# lets us generate both BA and Util maps from the same GeoDataFrame
# We have to do this separately for the data up to 2012 (which doesn't
# include state) and the 2013 and onward data (which we need to have
# state for)
early_ba_util_county = (
ba_by_util.drop("state", axis="columns")
.merge(st_eia861, on=["report_date", "utility_id_eia"], how="outer")
.query("report_date <= '2012-12-31'")
)
late_ba_util_county = (
ba_by_util
.merge(st_eia861, on=["report_date", "utility_id_eia", "state"], how="outer")
.query("report_date >= '2013-01-01'")
)
ba_util_county = pd.concat([early_ba_util_county, late_ba_util_county])
# Bring in county geometry information based on FIPS ID from Census
ba_util_county_gdf = (
census_gdf[["GEOID10", "NAMELSAD10", "geometry"]]
.to_crs(MAP_CRS)
.rename(
columns={
"GEOID10": "county_id_fips",
"NAMELSAD10": "county_name_census",
}
)
.merge(ba_util_county)
)
return ba_util_county_gdf_____no_output_____
</code>
## Map Balancing Authorities_____no_output_____
<code>
def map_ba(ba_ids, year, ba_util_county_gdf, save=False):
"""
Create a map of a balancing authority for a historical year.
Args:
ba_ids (iterable): A collection of Balancing Authority IDs.
year (int): The year for which to create a map.
ba_util_county_gdf (geopandas.GeoDataFrame): A dataframe
associating report_date, balancing_authority_id_eia, and
county_id_fips.
save (bool): If True, save the figure to disk.
Returns:
None
"""
map_gdf = (
ba_util_county_gdf[
(ba_util_county_gdf.report_date.dt.year == year) &
(ba_util_county_gdf.balancing_authority_id_eia.isin(ba_ids)) &
(~ba_util_county_gdf.county_id_fips.str.match("^02")) & # Avoid Alaska
(~ba_util_county_gdf.county_id_fips.str.match("^15")) & # Avoid Hawaii
(~ba_util_county_gdf.county_id_fips.str.match("^72")) # Avoid Puerto Rico
]
.drop_duplicates(subset=["balancing_authority_id_eia", "county_id_fips"])
)
ax = map_gdf.plot(figsize=(20, 20), color="black", alpha=0.25, linewidth=0.25)
plt.title(f"Balancing Areas ({year=})")
ctx.add_basemap(ax)
if save is True:
plt.savefig(f"BA_Overlap_{year}.jpg")_____no_output_____def compare_hifld_eia_ba(ba_code, hifld_gdf, eia_gdf):
"""
Compare historical EIA BAs vs. HIFLD geometries.
"""
fig, (hifld_ax, eia_ax) = plt.subplots(nrows=1, ncols=2, sharex=True, sharey=True)
hifld_ax.set_title(f"{ba_code} (HIFLD)")
hifld_gdf[hifld_gdf.ABBRV==ba_code].to_crs(MAP_CRS).plot(ax=hifld_ax, linewidth=0)
eia_ax.set_title(f"{ba_code} (EIA)")
eia_gdf[
(eia_gdf.balancing_authority_code_eia==ba_code) &
(eia_gdf.report_date.dt.year == 2017)
].plot(ax=eia_ax, linewidth=0.1)
plt.show()_____no_output_____
</code>
# Read Data_____no_output_____## EIA 860 via PUDL Outputs_____no_output_____
<code>
plants_eia860 = pudl_out.plants_eia860()
utils_eia860 = pudl_out.utils_eia860()_____no_output_____
</code>
## EIA 861 (2010-2018)
* Not yet fully integrated into PUDL
* Post-transform harvesting process isn't compatible w/ EIA 861 structure
* Only getting the `sales_eia861`, `balancing_authority_eia861`, and `service_territory_eia861` tables_____no_output_____
<code>
%%time
logger.setLevel("WARN")
eia_years = list(range(2010, 2019))
eia_inputs = {
"eia860_years": [],
"eia860_tables": pudl.constants.pudl_tables["eia860"],
"eia861_years": eia_years,
"eia861_tables": pudl.constants.pudl_tables["eia861"],
"eia923_years": [],
"eia923_tables": pudl.constants.pudl_tables["eia923"],
}
eia_transformed_dfs = test_etl_eia(eia_inputs=eia_inputs, pudl_settings=pudl_settings)
logger.setLevel("INFO")_____no_output_____ba_eia861 = eia_transformed_dfs["balancing_authority_eia861"].copy()
st_eia861 = eia_transformed_dfs["service_territory_eia861"].copy()
sales_eia861 = eia_transformed_dfs["sales_eia861"].copy()_____no_output_____raw_eia861_dfs = pudl.extract.eia861.Extractor().extract(years=range(2010,2019), testing=True)_____no_output_____
</code>
## FERC 714 (2006-2018)_____no_output_____
<code>
%%time
logger.setLevel("WARN")
raw_ferc714 = pudl.extract.ferc714.extract(pudl_settings=pudl_settings)
tfr_ferc714 = pudl.transform.ferc714.transform(raw_ferc714)
logger.setLevel("INFO")_____no_output_____
</code>
## HIFLD Electricity Planning Areas (2018)
* Electricity Planning Area geometries from HIFLD.
* Indexed by `ID` which corresponds to EIA utility or balancing area IDs.
* Only valid for 2017-2018._____no_output_____
<code>
hifld_pa_gdf = (
pudl.analysis.demand_mapping.get_hifld_planning_areas_gdf(pudl_settings)
.to_crs(MAP_CRS)
)_____no_output_____
</code>
## US Census DP1 (2010)
* This GeoDataFrame contains county-level geometries and demographic data._____no_output_____
<code>
%%time
census_gdf = (
pudl.analysis.demand_mapping.get_census2010_gdf(pudl_settings, layer="county")
.to_crs(MAP_CRS)
)_____no_output_____
</code>
# Combine Data_____no_output_____## Categorize FERC 714 Respondent IDs_____no_output_____
<code>
rids_ferc714 = (
tfr_ferc714["respondent_id_ferc714"]
.pipe(categorize_eia_code, utils_eia860, ba_eia861)
)_____no_output_____
</code>
## Add FERC 714 IDs to HIFLD_____no_output_____
<code>
hifld_pa_gdf = (
hifld_pa_gdf
.merge(rids_ferc714, left_on="ID", right_on="eia_code", how="left")
)_____no_output_____
</code>
## Add Respondent info to FERC 714 Demand_____no_output_____
<code>
dhpa_ferc714 = pd.merge(
tfr_ferc714["demand_hourly_pa_ferc714"],
tfr_ferc714["respondent_id_ferc714"],
on="respondent_id_ferc714",
how="left", # There are respondents with no demand
)_____no_output_____
</code>
# Utilities vs. Balancing Authorities
Exploration of the Balancing Authority EIA 861 table for cleanup
### Which columns are available in which years?
| Year | BA ID | BA Name | BA Code | Util ID | Util Name | State | N |
|------|-------|---------|---------|---------|-----------|-------|----|
| 2010 | XXXXX | XXXXXXX | | XXXXXXX | | |3193|
| 2011 | XXXXX | XXXXXXX | | XXXXXXX | | |3126|
| 2012 | XXXXX | XXXXXXX | | XXXXXXX | XXXXXXXXX | |3146|
| 2013 | XXXXX | XXXXXXX | XXXXXXX | | | XXXXX | 239|
| 2014 | XXXXX | XXXXXXX | XXXXXXX | | | XXXXX | 208|
| 2015 | XXXXX | XXXXXXX | XXXXXXX | | | XXXXX | 203|
| 2016 | XXXXX | XXXXXXX | XXXXXXX | | | XXXXX | 203|
| 2017 | XXXXX | XXXXXXX | XXXXXXX | | | XXXXX | 203|
| 2018 | XXXXX | XXXXXXX | XXXXXXX | | | XXXXX | 204|
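One way to reproduce this availability pattern directly from the transformed BA table (a quick sketch; it just counts non-null values per column and report year in `ba_eia861`):_____no_output_____
<code>
# Sketch: count non-null entries per column and report year in the BA table,
# to check the availability pattern summarized in the table above.
ba_availability = (
    ba_eia861
    .assign(report_year=lambda x: x.report_date.dt.year)
    .groupby("report_year")
    .agg(lambda s: s.notnull().sum())
)
ba_availability_____no_output_____
</code>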
### What does this table mean?
* In 2010-2012, the table says which utilities (by ID) are included in which balancing authorities.
* In 2013-2018, the table indicates which *states* a BA is operating in, and also provides a BA Code
### Questions:
* Where does the `balancing_authority_code` show up elsewhere in the EIA 860/861 data?
* `plants_eia860` (nowhere else that I know of)
* Are the BA to Utility mappings likely to remain valid throughout the entire time period? Can we propagate them forward?
* No, there's some variation year to year in which utilities are associated with which BAs
* Are the BA Code/Name to BA ID mappings permanent?
* No they aren't -- when a BA changes owners and names, the code changes, but ID stays the same._____no_output_____## Untangling HIFLD, FERC 714, & EIA IDs
* There are unspecified "EIA codes" associated with FERC 714 respondents.
* These IDs correspond to a mix of `utility_id_eia` and `balancing_authority_id_eia` values.
* Similarly, the ID field of the HIFLD geometries are a mix of BA and Utility IDs from EIA.
* This is extra confusing, because EIA *usually* uses the *same* ID for BAs and Utils.
* However, the EIA BA and Util IDs appear to be distinct namespaces
* Not all IDs which appear in both tables identify the same entity in both tables.
* In a few cases different IDs are used to identify the same entity when it shows up in both tables.
* It could be that whoever entered the IDs in the FERC 714 / HIFLD datasets didn't realize these were different sets of IDs._____no_output_____### BA / Utility ID Overlap
* Example of an ID that shows up in both, but refers to different entities, see `59504`
* `balancing_area_id_eia == 59504` is the Southwest Power Pool (SWPP).
* `utility_id_eia == 59504` is Kirkwood Community College, in MO.
* Example of an entity that exists in both datasets, but shows up with different IDs, see PacifiCorp.
* Has two BA IDs (East and West): `[14379, 14378]`
* Has one Utility ID: `14354`
* Example of an entity that shows up with the same ID in both tables:
* ID `15466` is Public Service Co of Colorado -- both a BA (PSCO) and a Utility._____no_output_____
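A quick way to see these collisions is to look the same ID up in both tables (a sketch using the `ba_eia861` and `utils_eia860` tables loaded above):_____no_output_____
<code>
# Sketch: the same numeric ID can name different entities in the BA and Utility tables.
display(
    ba_eia861.query("balancing_authority_id_eia == 59504")
    [["balancing_authority_id_eia", "balancing_authority_name_eia"]]
    .drop_duplicates()
)
display(
    utils_eia860.query("utility_id_eia == 59504")
    [["utility_id_eia", "utility_name_eia"]]
    .drop_duplicates()
)_____no_output_____
</code>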
<code>
# BA ID comes from EIA 861 BA Table
ba_ids = set(ba_eia861.balancing_authority_id_eia)
print(f"Total # of BA IDs: {len(ba_ids)}")
# Util ID comes from EIA 860 Utilities Entity table.
util_ids = set(pudl_out.utils_eia860().utility_id_eia)
print(f"Total # of Util IDs: {len(util_ids)}")
ba_not_util_ids = ba_ids.difference(util_ids)
print(f"BA IDs that are not Util IDs: {len(ba_not_util_ids)}")
util_not_ba_ids = util_ids.difference(ba_ids)
print(f"Util IDs that are not BA IDs: {len(util_not_ba_ids)}")
ba_and_util_ids = ba_ids.intersection(util_ids)
print(f"BA IDs that are also Util IDs: {len(ba_and_util_ids)}")_____no_output_____ba_and_util = (
ba_eia861
.loc[:, ["balancing_authority_id_eia", "balancing_authority_name_eia"]]
.dropna(subset=["balancing_authority_id_eia"])
.merge(
pudl_out.utils_eia860(),
left_on="balancing_authority_id_eia",
right_on="utility_id_eia",
how="inner"
)
.loc[:, [
"utility_id_eia",
"balancing_authority_name_eia",
"utility_name_eia",
]]
.rename(columns={"utility_id_eia": "util_ba_id"})
.drop_duplicates()
.reset_index(drop=True)
)
ba_not_util = (
ba_eia861.loc[ba_eia861.balancing_authority_id_eia.isin(ba_not_util_ids)]
.loc[:,["balancing_authority_id_eia", "balancing_authority_code_eia", "balancing_authority_name_eia"]]
.drop_duplicates(subset=["balancing_authority_id_eia", "balancing_authority_code_eia"])
.sort_values("balancing_authority_id_eia")
)_____no_output_____
</code>
### Missing IDs
* There are `eia_code` values that don't show up in the list of balancing authority IDs (2010-2018).
* There are also `eia_code` values that don't show up in the list of utility IDs (2009-2018).
* There are a few `eia_code` values that don't show up in either!
* Mostly this is an artifact of the different time period covered by FERC 714 (2006-2018).
* If we look only at the respondents that reported non-zero demand for 2010-2018, we find that all of the `eia_code` values *do* appear in either the `balancing_authority_eia861` or `utilities_eia860` tables._____no_output_____
<code>
rids_ferc714[
(~rids_ferc714.eia_code.isin(ba_eia861.balancing_authority_id_eia.unique())) &
(~rids_ferc714.eia_code.isin(utils_eia860.utility_id_eia.unique()))
]_____no_output_____rids_recent = (
dhpa_ferc714
.groupby(["respondent_id_ferc714", "report_year"])
.agg({"demand_mwh": sum})
.reset_index()
.query("report_year >= 2010")
.query("demand_mwh >= 0.0")
.merge(rids_ferc714[["eia_code", "respondent_id_ferc714", "respondent_name_ferc714"]], how="left")
.drop(["report_year", "demand_mwh"], axis="columns")
.drop_duplicates()
)
assert len(rids_recent[
(~rids_recent.eia_code.isin(ba_eia861.balancing_authority_id_eia.unique())) &
(~rids_recent.eia_code.isin(utils_eia860.utility_id_eia.unique()))
]) == 0_____no_output_____
</code>
### BA to Utility Mappings are Many to Many
* Unsurprisingly, BAs often contain many utilities.
* However, it's also common for utilities to participate in more than one BA.
* About 1/3 of all utilities show up in association with more than one BA_____no_output_____
<code>
ba_to_util_mapping = (
ba_eia861[["balancing_authority_id_eia", "utility_id_eia"]]
.dropna(subset=["balancing_authority_id_eia", "utility_id_eia"])
.drop_duplicates(subset=["balancing_authority_id_eia", "utility_id_eia"])
.groupby(["balancing_authority_id_eia"])
.agg({
"utility_id_eia": "count"
})
)
plt.hist(ba_to_util_mapping.utility_id_eia, bins=99, range=(1,100))
plt.xlabel("# of Utils / BA")
plt.ylabel("# of BAs")
plt.title("Number of Utilities per Balancing Area");_____no_output_____util_to_ba_mapping = (
ba_eia861[["balancing_authority_id_eia", "utility_id_eia"]]
.dropna(subset=["balancing_authority_id_eia", "utility_id_eia"])
.drop_duplicates(subset=["balancing_authority_id_eia", "utility_id_eia"])
.groupby(["utility_id_eia"])
.agg({
"balancing_authority_id_eia": "count"
})
)
plt.hist(util_to_ba_mapping.balancing_authority_id_eia, bins=4, range=(1,5))
plt.title("Number of Balancing Authorities per Utility");_____no_output_____
</code>
## Georeferenced Demand Fraction
* With their original EIA codes the HIFLD Electricity Planning Areas only georeference some of the FERC 714 demand.
* It's about 86% in 2018. In 2013 and earlier years, the fraction starts to drop off more quickly, to 76% in 2010, and 58% in 2006.
* After manually identifying and fixing some bad and missing EIA codes in the FERC 714, the mapped fraction is much higher.
* 98% or more in 2014-2018, dropping to 87% in 2010, and 68% in 2006
* **However** because the geometries have also evolved over time, just the fact that the demand time series is linked to **some** HIFLD geometry, doesn't mean that it's the **right** geometry._____no_output_____
<code>
annual_demand_ferc714 = (
dhpa_ferc714
.groupby(["report_year"]).demand_mwh.sum()
.reset_index()
)
annual_demand_mapped = (
dhpa_ferc714[dhpa_ferc714.eia_code.isin(hifld_pa_gdf.eia_code)]
.groupby(["report_year"]).demand_mwh.sum()
.reset_index()
.merge(annual_demand_ferc714, on="report_year", suffixes=("_map", "_tot"))
.assign(
fraction_mapped=lambda x: x.demand_mwh_map / x.demand_mwh_tot
)
)_____no_output_____plt.plot("report_year", "fraction_mapped", data=annual_demand_mapped, lw=5)
plt.ylabel("Fraction of demand which is mapped")
plt.title("Completeness of HIFLD demand mapping by year")
plt.ylim(0.6, 1.05);_____no_output_____
</code>
# Historical Planning Area Geometries
Compile a GeoDataFrame that relates balancing authorities, their constituent utilities, and the collections of counties which are served by those utilities, across all the years for which we have EIA 861 data (2010-2018)_____no_output_____
<code>
ba_util_county_gdf = georef_bas(ba_eia861, st_eia861, sales_eia861, census_gdf)_____no_output_____ba_util_county_gdf.info()_____no_output_____for year in (2010, 2014, 2018):
map_ba(ba_util_county_gdf.balancing_authority_id_eia.unique(), year, ba_util_county_gdf, save=True)_____no_output_____
</code>
## Output Simplified Annual BA Geometries
* This takes half an hour so it's commented out.
* Resulting shapefile is ~250MB compressed. Seems too big.
* Need to figure out how to add an explicit projection.
* Need to figure out how to make each year's BA geometries its own layer._____no_output_____
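One possible approach, sketched below rather than run: write each year's dissolved BA geometries to its own GeoPackage layer, re-projecting first so the CRS is stated explicitly in the output file. The filename is a placeholder and the column names follow the `ba_util_county_gdf` built above._____no_output_____
<code>
# Sketch only: one GeoPackage layer per year, with an explicit projection.
ba_yearly = (
    ba_util_county_gdf
    .assign(report_year=lambda x: x.report_date.dt.year)
    .loc[:, ["report_year", "balancing_authority_id_eia", "county_id_fips", "geometry"]]
    .dropna(subset=["balancing_authority_id_eia"])
    .drop_duplicates(subset=["report_year", "balancing_authority_id_eia", "county_id_fips"])
)
for year in range(2010, 2019):
    year_gdf = ba_yearly[ba_yearly.report_year == year]
    if year_gdf.empty:
        continue
    (
        year_gdf
        .dissolve(by="balancing_authority_id_eia")  # one geometry per BA for this year
        .reset_index()
        .drop("county_id_fips", axis="columns")
        .to_crs(MAP_CRS)  # make the projection explicit before writing
        .to_file("ba_annual.gpkg", layer=f"ba_{year}", driver="GPKG")
    )_____no_output_____
</code>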
<code>
#%%time
#ba_fips_simplified = (
# ba_util_county_gdf
# .assign(report_year=lambda x: x.report_date.dt.year)
# .drop([
# "report_date",
# "state",
# "state_id_fips",
# "county",
# "county_name_census",
# "utility_id_eia",
# "utility_name_eia"
# ], axis="columns")
# .drop_duplicates(subset=["report_year", "balancing_authority_id_eia", "county_id_fips"])
# .dropna(subset=["report_year", "balancing_authority_id_eia", "county_id_fips"])
# .loc[:,["report_year", "balancing_authority_id_eia", "balancing_authority_code_eia", "balancing_authority_name_eia", "county_id_fips", "geometry"]]
#)
#ba_annual_gdf = (
# ba_fips_simplified
# .dissolve(by=["report_year", "balancing_authority_id_eia"])
# .reset_index()
# .drop("county_id_fips", axis="columns")
#)
#ba_output_gdf = (
# ba_annual_gdf
# .astype({
# "report_year": int,
# "balancing_authority_id_eia": float,
# "balancing_authority_code_eia": str,
# "balancing_authority_name_eia": str,
# })
# .rename(columns={
# "report_year": "year",
# "balancing_authority_id_eia": "ba_id",
# "balancing_authority_code_eia": "ba_code",
# "balancing_authority_name_eia": "ba_name",
# })
#)
#ba_output_gdf.to_file("ba_annual.shp")_____no_output_____
</code>
## Compare HIFLD and EIA BA maps for 2018_____no_output_____
<code>
for ba_code in hifld_pa_gdf.ABBRV.unique():
if ba_code in ba_util_county_gdf.balancing_authority_code_eia.unique():
compare_hifld_eia_ba(ba_code, hifld_pa_gdf, ba_util_county_gdf)_____no_output_____
</code>
## Time Evolution of BA Geometries
For each BA we now have a collection of annual geometries. How have they changed over time?_____no_output_____
<code>
for ba_code in ba_util_county_gdf.balancing_authority_code_eia.unique():
fig, axes = plt.subplots(nrows=3, ncols=3, figsize=(20,20), sharex=True, sharey=True, facecolor="white")
for year, ax in zip(range(2010, 2019), axes.flat):
ax.set_title(f"{ba_code} ({year})")
ax.set_xticks([])
ax.set_yticks([])
plot_gdf = (
ba_util_county_gdf
.assign(report_year=lambda x: x.report_date.dt.year)
.query(f"balancing_authority_code_eia=='{ba_code}'")
.query(f"report_year=='{year}'")
.drop_duplicates(subset="county_id_fips")
)
plot_gdf.plot(ax=ax, linewidth=0.1)
plt.show()_____no_output_____
</code>
## Merge Geometries with FERC 714
Now that we have a draft of what the BA and Utility level territories look like, we can merge those with the FERC 714 Respondent ID table, see how many leftovers there are, and check whether the BA and Utility geometries play well together.
Before dissolving the boundaries between counties the output dataframe needs to have:
* `report_date`
* `respondent_id_ferc714`
* `eia_code`
* `respondent_type`
* `balancing_authority_id_eia`
* `utility_id_eia`
* `county_id_fips`
* `geometry`
* `balancing_authority_code_eia`
* `balancing_authority_name_eia`
* `respondent_name_ferc714`
* `utility_name_eia`
* `county_name_census`
* `state`
* `state_id_fips`_____no_output_____
<code>
utils_ferc714 = (
rids_ferc714.loc[
rids_ferc714.respondent_type == "utility",
["respondent_id_ferc714", "respondent_name_ferc714", "utility_id_eia", "respondent_type"]
]
)
bas_ferc714 = (
rids_ferc714.loc[
rids_ferc714.respondent_type == "balancing_authority",
["respondent_id_ferc714", "respondent_name_ferc714", "balancing_authority_id_eia", "respondent_type"]
]
)
null_ferc714 = (
rids_ferc714.loc[
rids_ferc714.respondent_type.isnull(),
["respondent_id_ferc714", "respondent_name_ferc714", "respondent_type"]
]
)
bas_ferc714_gdf = (
ba_util_county_gdf
.drop(["county"], axis="columns")
.merge(bas_ferc714, how="right")
)
utils_ferc714_gdf = (
ba_util_county_gdf
.drop(["balancing_authority_id_eia", "balancing_authority_code_eia", "balancing_authority_name_eia", "county"], axis="columns")
.drop_duplicates()
.merge(utils_ferc714, how="right")
)
rids_ferc714_gdf = (
pd.concat([bas_ferc714_gdf, utils_ferc714_gdf, null_ferc714])
.astype({
"county_id_fips": pd.StringDtype(),
"county_name_census": pd.StringDtype(),
"respondent_type": pd.StringDtype(),
"utility_id_eia": pd.Int64Dtype(),
"balancing_authority_id_eia": pd.Int64Dtype(),
"balancing_authority_code_eia": pd.StringDtype(),
"balancing_authority_name_eia": pd.StringDtype(),
"state": pd.StringDtype(),
"utility_name_eia": pd.StringDtype(),
})
)_____no_output_____display(rids_ferc714_gdf.info())
rids_ferc714_gdf.sample(10)_____no_output_____
</code>
## Check Geometries for Completeness
* How many balancing authorities do we have geometries for?
* How many utilities do we have geometries for?
* Do those geometries cover all of the entities that report in FERC 714?
* Do we have a geometry for every entity in every year in which it reports demand?_____no_output_____### Count BA & Util Geometries_____no_output_____
<code>
n_bas = len(rids_ferc714_gdf.balancing_authority_id_eia.unique())
logger.info(f"Found territories for {n_bas} unique Balancing Areas")
n_utils = len(rids_ferc714_gdf.loc[
(rids_ferc714_gdf.balancing_authority_id_eia.isnull()) &
(~rids_ferc714_gdf.utility_id_eia.isnull())
].utility_id_eia.unique())
logger.info(f"Found territories for {n_utils} Utilities outside of the BAs")_____no_output_____
</code>
### Identify Missing Geometries
* Within each year of historical data from 2010-2018, are there any entities (either BA or Utility) which **do** have hourly demand reported in the FERC 714, for which we do not have a historical geometry?
* How many of them are there?
* Why are they missing?
* Do we have the geometries in adjacent years and can we re-use them?
* Is it possible that the FERC 714 IDs correspond to a precursor entity, or one that was discontinued? E.g. if SWPP is missing in 2010, is that because the BA was reported in EIA as SPS in that year?
* How important are the missing geometries? Do the associated entities have a lot of demand associated with them in FERC 714?
* Can we use `ffill` or `backfill` on the `geometry` column in a GeoDataFrame?_____no_output_____
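A sketch of the last idea: the `geometry` column is an ordinary object column, so pandas' `ffill`/`bfill` can be applied within each respondent once there is a single geometry per respondent and year (e.g. on the dissolved table built in the "Dissolve to BA or Util" section below). Untested here; the column names mirror that table._____no_output_____
<code>
# Untested sketch: borrow a geometry from an adjacent year when a respondent-year has none.
# Expects one row per (respondent_id_ferc714, report_date) with a possibly-null geometry.
def fill_missing_geometries(annual_gdf):
    """Forward- then back-fill geometries within each FERC 714 respondent."""
    return (
        annual_gdf
        .sort_values(["respondent_id_ferc714", "report_date"])
        .assign(
            geometry=lambda x: (
                x.groupby("respondent_id_ferc714")["geometry"]
                .transform(lambda s: s.ffill().bfill())
            )
        )
    )_____no_output_____
</code>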
<code>
problem_ids = pd.DataFrame()
for year in range(2010, 2019):
this_year_gdf = (
rids_ferc714_gdf
.loc[(rids_ferc714_gdf.report_date.dt.year==year) & (~rids_ferc714_gdf.geometry.isnull())]
)
# All BA IDs which show up in FERC 714:
ba_ids_ferc714 = (
rids_ferc714
.loc[rids_ferc714.respondent_type=="balancing_authority",
"balancing_authority_id_eia"]
.unique()
)
# BA IDs which have a geometry in this year
ba_geom_ids = (
this_year_gdf
.balancing_authority_id_eia
.dropna().unique()
)
# BA IDs which have reported demand in this year
ba_demand_ids = (
dhpa_ferc714
.query("report_year==@year")
.query("demand_mwh>0.0")
.loc[dhpa_ferc714.eia_code.isin(ba_ids_ferc714)]
.eia_code.unique()
)
    # Need to make the demand IDs clearly either utility or BA IDs. Whoops!
missing_ba_geom_ids = [x for x in ba_demand_ids if x not in ba_geom_ids]
logger.info(f"{len(missing_ba_geom_ids)} BA respondents w/o geometries in {year}")
problem_ids = problem_ids.append(
rids_ferc714
.loc[rids_ferc714.balancing_authority_id_eia.isin(missing_ba_geom_ids)]
.assign(year=year)
)
# All EIA Utility IDs which show up in FERC 714:
util_ids_ferc714 = (
rids_ferc714
.loc[rids_ferc714.respondent_type=="utility",
"utility_id_eia"]
.unique()
)
# EIA Utility IDs which have geometry information for this year
util_geom_ids = (
this_year_gdf
.utility_id_eia
.dropna().unique()
)
util_demand_ids = (
dhpa_ferc714
.query("report_year==@year")
.query("demand_mwh>0.0")
.loc[dhpa_ferc714.eia_code.isin(util_ids_ferc714)]
.eia_code.unique()
)
missing_util_geom_ids = [x for x in util_demand_ids if x not in util_geom_ids]
logger.info(f"{len(missing_util_geom_ids)} Utility respondents w/o geometries in {year}")
problem_ids = problem_ids.append(
rids_ferc714
.loc[rids_ferc714.utility_id_eia.isin(missing_util_geom_ids)]
.assign(year=year)
)_____no_output_____problem_ids.query("year==2010").query("respondent_type=='balancing_authority'")_____no_output_____
</code>
## Dissolve to BA or Util
* At this point we still have geometries at the county level.
* This is 150,000+ records.
* Really we just want a single geometry per respondent per year.
* Dissolve based on year and respondent_id_ferc714.
* Merge the annual per-respondent geometry with the rids_ferc714 which has more information
* Note that this takes about half an hour to run..._____no_output_____
<code>
%%time
dissolved_rids_ferc714_gdf = (
rids_ferc714_gdf.drop_duplicates(subset=["report_date", "county_id_fips", "respondent_id_ferc714"])
.dissolve(by=["report_date", "respondent_id_ferc714"])
.reset_index()
.loc[:, ["report_date", "respondent_id_ferc714", "geometry"]]
.merge(rids_ferc714, on="respondent_id_ferc714", how="outer")
)
#dissolved_rids_ferc714_gdf.to_file("planning_areas_ferc714.gpkg", driver="GPKG")_____no_output_____
</code>
### Select based on respondent type_____no_output_____
<code>
dissolved_utils = dissolved_rids_ferc714_gdf.query("respondent_type=='utility'")
dissolved_bas = dissolved_rids_ferc714_gdf.query("respondent_type=='balancing_authority'")_____no_output_____
</code>
### Nationwide BA / Util Maps
* Still want to add the US state boundaries / coastlines to this for context._____no_output_____
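A sketch of how that context could be added: load a US state boundary layer (the path below is just a placeholder) and draw its outlines on the same axes before adding the basemap._____no_output_____
<code>
# Sketch only: overlay state outlines for context; the shapefile path is a placeholder
# for any US state boundary layer (e.g. Census cartographic boundary files).
us_states_gdf = (
    geopandas.read_file("data/census/cb_2018_us_state_20m.shp")
    .to_crs(MAP_CRS)
)
ax = (
    dissolved_bas
    .query("report_date=='2018-01-01'")
    .plot(figsize=(20, 20), color="blue", alpha=0.25, linewidth=1)
)
us_states_gdf.boundary.plot(ax=ax, color="black", linewidth=0.5)
ctx.add_basemap(ax)
plt.title("FERC 714 Balancing Authority Respondents (2018) with state outlines");_____no_output_____
</code>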
<code>
unwanted_ba_ids = (
112, # Alaska
133, # Alaska
178, # Hawaii
301, # PJM Dupe
302, # PJM Dupe
303, # PJM Dupe
304, # PJM Dupe
305, # PJM Dupe
306, # PJM Dupe
)
for report_date in pd.date_range(start="2010-01-01", end="2018-01-01", freq="AS"):
ba_ax = (
dissolved_bas
.query("report_date==@report_date")
.query("respondent_id_ferc714 not in @unwanted_ba_ids")
.plot(figsize=(20, 20), color="blue", alpha=0.25, linewidth=1)
)
plt.title(f"FERC 714 Balancing Authority Respondents {report_date}")
ctx.add_basemap(ba_ax)
util_ax = (
dissolved_utils
.query("report_date==@report_date")
.plot(figsize=(20, 20), color="red", alpha=0.25, linewidth=1)
)
plt.title(f"FERC 714 Utility Respondents {report_date}")
ctx.add_basemap(util_ax)
plt.show();_____no_output_____
</code>
### Per-respondent annual maps
* For each respondent make a grid of 9 (2010-2018)
* Show state lines in bg for context
* Limit bounding box by the respondent's territory_____no_output_____# Remaining Tasks_____no_output_____## Geometry Cleanup:
* Why do some respondents lack geometries in some years?
* Why do some respondents lack geometries in **all** years? (e.g. Tri-State G&T)
* Why do some counties have no BA or Utility coverage in some or all years?
* What combinations of years and respondents are missing?
* Compare what we've ended up doing to the Aufhammer paper again.
* Is there any need to use name-based matching between the Planning Area descriptions & EIA Service Territories?
* Problem BAs / Utilities:
* All the WAPA BAs
* PacifiCorp East / West
* Southern Company
* MISO (Some other IDs that seem related?)
* PJM (Early years seem out of bounds)_____no_output_____## FERC 714 Demand Time Series Cleanup
### Find broken data:
* Run Tyler Ruggles' anomaly detection code as improved by Greg Schivley
* What kind of anomalies are we finding? Are they a problem? What portion of the overall dataset do they represent?
### Repair data:
* How do we want to fill in the gaps?
* Ideally would be able to use the MICE technique that Tyler used, but we need to keep it all in Python.
* Can do much simpler rolling averages or something for the moment when there are small gaps just to have completeness.
* Should make this gap filling process modular -- use different techniques and see whether they do what we need._____no_output_____# Miscellaneous Notes_____no_output_____## FERC 714 Demand Irregularities
Unusual issues that need to be addressed, or demand discontinuities that may be useful in the context of aggregating historical demand into modern planning areas. Organized by FERC 714 Respondent ID:
* Missing demand data / weird zeroes
* 111: (2008)
* 125: (2015)
* 137: (2006)
* 139: (2006) Only the last hour of every day. Maybe 0-23 vs 1-24 reporting?
* 141: (2006, 2007, 2008, 2009, 2010)
* 148: (2006)
* 153: (2006)
* 154: (2006)
* 161: (all)
* 183: (2007, 2009)
* 208: (2008)
* 273: (2007, 2008)
* 283: (2007)
* 287: (2008-2012)
* 288: (2006)
* 289: (2009)
* 293: (2006)
* 294: (2006)
* 311: (2008-2011)
* Inverted Demand (Sign Errors):
* 156: (2006, 2007, 2008, 2009)
* 289: (2006-2008, 2010)
* Large demand discontinuities
* 107: Demand triples at end of 2006.
* 115: Two big step downs, 2007-2008, and 2011-2012
* 121: 50% increase at end of 2007.
* 128: Step up at end of 2007
* 133: Step down end of 2013 and again end of 2015
* 190: Demand doubled at end of 2008
* 214: 50% jump in early 2012.
* 256: big jump at end of 2006.
* 261: Big jump at end of 2008.
* 274: drop at end of 2007
* 275: Jump at end of 2007
* 287: Demand before and after big gap are very different.
* 299: Big drop at end of 2015
* 307: Jump at end of 2014
* 321: Jump at end of 2013_____no_output_____
| {
"repository": "catalyst-cooperative/electricity-demand-mapping",
"path": "notebooks/historical_planning_areas.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 11,
"size": 62976,
"hexsha": "d08fb725d6ee6497a4ec12c4b9527349bfbc7664",
"max_line_length": 259,
"avg_line_length": 36.5502031341,
"alphanum_fraction": 0.5658822409
} |
# Notebook from Py101/py101-assignments-moka1992
Path: 02/homework_day2.ipynb
<center>
<hr>
<h1>Python Crash Course</h1>
<h2>Master in Data Science - Sapienza University</h2>
<h2>Homework 2: Python Challenges</h2>
<h3>A.A. 2017/18</h3>
<h3>Tutor: Francesco Fabbri</h3>
<hr>
</center>
_____no_output_____# Instructions
So guys, here we are! **Finally** you're facing your first **REAL** homework. Are you ready to fight?
We're going to apply all the Pythonic stuff seen before AND EVEN MORE...
## Simple rules:
1. Don't touch the instructions, you **just have to fill the blank rows**.
2. This is supposed to be an exercise for improving your Pythonic Skills in a spirit of collaboration so...of course you can help your classmates and obviously get a really huge help as well from all the others (as the proverb says: "I get help from you and then you help me", right?!...)
3. **RULE OF THUMB** for you during the homework:
- *1st Step:* try to solve the problem alone
- *2nd Step:* googling randomly for the answer
- *3rd Step:* ask your colleagues
- *4th Step:* screaming and complaining about life
- *5th Step:* ask the Tutors
## And the Prize? The Beer?The glory?!:
Guys, life is hard... in this Master's it's even worse...
Soooo, since you seem so smart, I want to test you before the start of all the courses.
.
.
.
But not now.
You have to come prepared for the challenge, so right now solve these first 6 exercises; then it will be time for **FIGHTING** and (for one of you) **DRINKING**.
_____no_output_____# Warm-up..._____no_output_____### 1. 12! is equal to..._____no_output_____
<code>
def fatt(n):
if(n == 0):
return 1
else:
return n*fatt(n-1)
fatt(12)_____no_output_____
</code>
### 2. More math...
Write a program which will find all such numbers which are divisible by 7 but are not a multiple of 5, between 0 and 1000 (both included). The numbers obtained should be printed in a comma-separated sequence on a single line. (range and CFS)_____no_output_____
<code>
ex_2=[str(x) for x in range (1001) if x%7 ==0 and x%5 !=0]
','.join(ex_2)_____no_output_____
</code>
### 2. Count capital letters
In this exercise you're going to deal with YOUR DATA. Indeed, the list below stores your favorite TV series. But, as you can see, there is something weird: there are too many CaPITal LeTTErs. Your task is to count the capital letters in all the strings and then print the total number of capital letters in the whole list._____no_output_____
<code>
tv_series = ['Game of THRroneS',
'big bang tHeOrY',
'MR robot',
'WesTWorlD',
'fIRefLy',
"i haven't",
'HOW I MET your mothER',
'friENds',
'bRon broen',
'gossip girl',
'prISon break',
'breaking BAD']_____no_output_____count=0
for string in tv_series:
for letter in string:
        if letter.lower() != letter:
count+=1
_____no_output_____count_____no_output_____
</code>
### 3. A remark
Using the list above, create a dictionary where the keys are unique IDs and the values are the TV series.
You have to do the exercise keeping in mind these 2 constraints:
1. The order of the IDs has to be **dependent on the alphabetical order of the titles**, i.e. 0: first_title_in_alphabetical_order and so on...
2. **Solve the mess** of the capital letters: we want them only at the start of words ("prISon break" should be "Prison Break")
_____no_output_____
<code>
# write here your code
newlst = []
for x in tv_series:
x.title()
newlst.append(x.title())
newlst
_____no_output_____a=range(12)
b=sorted(newlst)
dict1=dict(zip(a,b))
dict1_____no_output_____
</code>
### 4. Dictionary to its maximum
Invert the keys with the values in the dictionary built before. _____no_output_____
<code>
# write here your code
inv= {v: k for k, v in dict1.items()}
inv_____no_output_____
</code>
Have you done it in **one line of code**? If not, try now!_____no_output_____
<code>
# write here your code
# already done above with a dictionary comprehension :D_____no_output_____
</code>
### 4. Other boring math
Let's talk about our beloved exams. Starting from the exams and CFU below, are you able to compute the weighted mean of them?
Let's do it and print the result.
Description of the data:
exams[1] = $(title_1, grade_1)$
cfu[1] = $CFU_1$_____no_output_____
<code>
exams = [('BIOINFORMATICS', 29),
('DATA MANAGEMENT FOR DATA SCIENCE', 30),
('DIGITAL EPIDEMIOLOGY', 26),
('NETWORKING FOR BIG DATA AND LABORATORY',28),
('QUANTITATIVE MODELS FOR ECONOMIC ANALYSIS AND MANAGEMENT','30 e lode'),
('DATA MINING TECHNOLOGY FOR BUSINESS AND SOCIETY', 30),
('STATISTICAL LEARNING',30),
('ALGORITHMIC METHODS OF DATA MINING AND LABORATORY',30),
('FUNDAMENTALS OF DATA SCIENCE AND LABORATORY', 29)]
cfu = sum([6,6,6,9,6,6,6,9,9])_____no_output_____cfu_____no_output_____type(exams [0])_____no_output_____a=list(zip (*exams))[1]
a
_____no_output_____type (a)_____no_output_____singlecfu = [6, 6, 6, 9, 6, 6, 6, 9, 9]_____no_output_____# '30 e lode' is a string, so map it to a number first (counted here as 30 -- an assumption)
grades = [30 if g == '30 e lode' else g for g in a]
# CFU-weighted mean of the grades
weighted_mean = sum(g * c for g, c in zip(grades, singlecfu)) / cfu
weighted_mean_____no_output_____
</code>
### 5. Palindromic numbers
Write a script which finds all the Palindromic numbers, in the range [0,**N**] (bounds included). The numbers obtained should be printed in a comma-separated sequence on a single line.
What is **N**?
Looking at the exercise before:
**N** = (Total number of CFU) x (Sum of all the grades)
(details: https://en.wikipedia.org/wiki/Palindromic_number)
_____no_output_____
<code>
def pali(n):
return str(n) == str(n)[::-1]
a=list(filter(pali, range(0,15876)))
print(a)[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 22, 33, 44, 55, 66, 77, 88, 99, 101, 111, 121, 131, 141, 151, 161, 171, 181, 191, 202, 212, 222, 232, 242, 252, 262, 272, 282, 292, 303, 313, 323, 333, 343, 353, 363, 373, 383, 393, 404, 414, 424, 434, 444, 454, 464, 474, 484, 494, 505, 515, 525, 535, 545, 555, 565, 575, 585, 595, 606, 616, 626, 636, 646, 656, 666, 676, 686, 696, 707, 717, 727, 737, 747, 757, 767, 777, 787, 797, 808, 818, 828, 838, 848, 858, 868, 878, 888, 898, 909, 919, 929, 939, 949, 959, 969, 979, 989, 999, 1001, 1111, 1221, 1331, 1441, 1551, 1661, 1771, 1881, 1991, 2002, 2112, 2222, 2332, 2442, 2552, 2662, 2772, 2882, 2992, 3003, 3113, 3223, 3333, 3443, 3553, 3663, 3773, 3883, 3993, 4004, 4114, 4224, 4334, 4444, 4554, 4664, 4774, 4884, 4994, 5005, 5115, 5225, 5335, 5445, 5555, 5665, 5775, 5885, 5995, 6006, 6116, 6226, 6336, 6446, 6556, 6666, 6776, 6886, 6996, 7007, 7117, 7227, 7337, 7447, 7557, 7667, 7777, 7887, 7997, 8008, 8118, 8228, 8338, 8448, 8558, 8668, 8778, 8888, 8998, 9009, 9119, 9229, 9339, 9449, 9559, 9669, 9779, 9889, 9999, 10001, 10101, 10201, 10301, 10401, 10501, 10601, 10701, 10801, 10901, 11011, 11111, 11211, 11311, 11411, 11511, 11611, 11711, 11811, 11911, 12021, 12121, 12221, 12321, 12421, 12521, 12621, 12721, 12821, 12921, 13031, 13131, 13231, 13331, 13431, 13531, 13631, 13731, 13831, 13931, 14041, 14141, 14241, 14341, 14441, 14541, 14641, 14741, 14841, 14941, 15051, 15151, 15251, 15351, 15451, 15551, 15651, 15751, 15851]
?filter_____no_output_____
</code>
### 6. StackOverflow_____no_output_____Let's start using your new best friend. Now I'm going to give you another task, slightly more difficult, BUT this time, just by googling, you will easily find the answer on www.stackoverflow.com. You can use the code there for solving the exercise BUT you have to understand the solution, **COMMENTING** the code to show me you understood the thinking process behind it._____no_output_____### 6. A
Show me an example of how to use **PROPERLY** the *Try - Except* statements_____no_output_____
<code>
# write here your code_____no_output_____
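# One possible answer (a minimal sketch): catch only the exceptions you expect,
# handle them, and let anything unexpected propagate.
grades = {'BIOINFORMATICS': 29, 'STATISTICAL LEARNING': 30}
try:
    value = grades['DIGITAL EPIDEMIOLOGY']   # this key is missing -> KeyError
    ratio = value / 0                        # would raise ZeroDivisionError
except KeyError as err:
    print(f"Missing exam: {err}")            # handle the specific error we expected
except ZeroDivisionError:
    print("Tried to divide by zero")
else:
    print(f"Everything went fine: {ratio}")  # runs only if no exception was raised
finally:
    print("This cleanup branch always runs") # runs whether or not an exception occurred_____no_output_____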
</code>
#### 6. B
Given the list of words below, after copying it into a variable, explain and provide code for obtaining a **Bag of Words** from it.
(Hint: use dictionaries and loops)_____no_output_____['theory', 'of', 'bron', 'firefly', 'thrones', 'break', 'bad', 'mother', 'firefly', "haven't", 'prison', 'big', 'friends', 'girl', 'westworld', 'bad', "haven't", 'gossip', 'thrones', 'your', 'big', 'how', 'friends', 'theory', 'your', 'bron', 'bad', 'bad', 'breaking', 'met', 'breaking', 'breaking', 'game', 'bron', 'your', 'breaking', 'met', 'bang', 'how', 'mother', 'bad', 'theory', 'how', 'i', 'friends', "haven't", 'of', 'of', 'gossip', 'i', 'robot', 'of', 'prison', 'bad', 'friends', 'friends', 'i', 'robot', 'bang', 'mother', 'bang', 'i', 'of', 'bad', 'friends', 'theory', 'i', 'friends', 'thrones', 'prison', 'theory', 'theory', 'big', 'of', 'bang', 'how', 'thrones', 'bang', 'theory', 'friends', 'game', 'bang', 'mother', 'broen', 'bad', 'game', 'break', 'break', 'bang', 'big', 'gossip', 'robot', 'met', 'i', 'game', 'your', 'met', 'bad', 'firefly', 'your']_____no_output_____
<code>
# write here your code_____no_output_____
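# One possible answer (a minimal sketch): count how many times each word appears,
# i.e. build a Bag of Words with a dictionary and a loop. `words` below is a
# shortened copy of the list above -- in practice, paste the full list in.
words = ['theory', 'of', 'bron', 'firefly', 'thrones', 'break', 'bad', 'mother', 'firefly', 'of']
bag_of_words = {}
for word in words:
    # get the current count (0 if the word is new) and add 1
    bag_of_words[word] = bag_of_words.get(word, 0) + 1
bag_of_words_____no_output_____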
</code>
#### 6. C
And now, write down a code which computes the first 10 Fibonacci numbers
(details: https://en.wikipedia.org/wiki/Fibonacci_number)_____no_output_____
<code>
# write here your code_____no_output_____
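# One possible answer (a minimal sketch): build the first 10 Fibonacci numbers iteratively.
fib = [0, 1]                       # the sequence starts with 0 and 1
while len(fib) < 10:
    fib.append(fib[-1] + fib[-2])  # each new term is the sum of the previous two
fib_____no_output_____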
</code>
| {
"repository": "Py101/py101-assignments-moka1992",
"path": "02/homework_day2.ipynb",
"matched_keywords": [
"bioinformatics",
"epidemiology"
],
"stars": null,
"size": 271358,
"hexsha": "d08fbe8bcf0141c254eef32e30b10aada07736c9",
"max_line_length": 134373,
"avg_line_length": 399.0558823529,
"alphanum_fraction": 0.9364345256
} |
# Notebook from carparel/NTDS
Path: Assignments/1_network_science.ipynb
# [NTDS'19] assignment 1: network science
[ntds'19]: https://github.com/mdeff/ntds_2019
[Eda Bayram](https://lts4.epfl.ch/bayram), [EPFL LTS4](https://lts4.epfl.ch) and
[Nikolaos Karalias](https://people.epfl.ch/nikolaos.karalias), [EPFL LTS2](https://lts2.epfl.ch)._____no_output_____## Students
* Team: `<5>`
* `<Alice Bizeul, Gaia Carparelli, Antoine Spahr and Hugues Vinzant>`
Grading:
* The first deadline is for individual submissions. The second deadline is for the team submission.
* All team members will receive the same grade based on the team solution submitted on the second deadline.
* As a fallback, a team can ask for individual grading. In that case, solutions submitted on the first deadline are graded.
* Collaboration between team members is encouraged. No collaboration between teams is allowed.
Submission:
* Textual answers shall be short. Typically one to two sentences.
* Code has to be clean.
* You cannot import any other library than we imported.
Note that Networkx is imported in the second section and cannot be used in the first.
* When submitting, the notebook is executed and the results are stored. I.e., if you open the notebook again it should show numerical results and plots. We won't be able to execute your notebooks.
* The notebook is re-executed from a blank state before submission. That is to be sure it is reproducible. You can click "Kernel" then "Restart Kernel and Run All Cells" in Jupyter._____no_output_____## Objective
The purpose of this milestone is to explore a given dataset and represent it as a network by constructing different graphs. In the first section, you will analyze the network's properties. In the second section, you will explore various network models and identify which model best fits the graphs you construct from the dataset._____no_output_____## Cora Dataset
The [Cora dataset](https://linqs.soe.ucsc.edu/node/236) consists of scientific publications classified into one of seven research fields.
* **Citation graph:** the citation network can be constructed from the connections given in the `cora.cites` file.
* **Feature graph:** each publication in the dataset is described by a 0/1-valued word vector indicating the absence/presence of the corresponding word from the dictionary and its research field, given in the `cora.content` file. The dictionary consists of 1433 unique words. A feature graph can be constructed using the Euclidean distance between the feature vector of the publications.
The [`README`](data/cora/README) provides details about the content of [`cora.cites`](data/cora/cora.cites) and [`cora.content`](data/cora/cora.content)._____no_output_____## Section 1: Network Properties_____no_output_____
<code>
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
%matplotlib inline_____no_output_____
</code>
### Question 1: Construct a Citation Graph and a Feature Graph_____no_output_____Read the `cora.content` file into a Pandas DataFrame by setting a header for the column names. Check the `README` file._____no_output_____
<code>
column_list = ['paper_id'] + [str(i) for i in range(1,1434)] + ['class_label']
pd_content = pd.read_csv('data/cora/cora.content', delimiter='\t', names=column_list)
pd_content.head()_____no_output_____
</code>
Print out the number of papers contained in each of the research fields.
**Hint:** You can use the `value_counts()` function._____no_output_____
<code>
pd_content['class_label'].value_counts()_____no_output_____
</code>
Select all papers from a field of your choice and store their feature vectors into a NumPy array.
Check its shape._____no_output_____
<code>
my_field = 'Neural_Networks'
features = pd_content[pd_content['class_label'] == my_field].drop(columns=['paper_id','class_label']).to_numpy()
features.shape_____no_output_____
</code>
Let $D$ be the Euclidean distance matrix whose $(i,j)$ entry corresponds to the Euclidean distance between feature vectors $i$ and $j$.
Using the feature vectors of the papers from the field which you have selected, construct $D$ as a Numpy array. _____no_output_____
<code>
distance = np.zeros([features.shape[0],features.shape[0]])
for i in range(features.shape[0]):
distance[i] = np.sqrt(np.sum((features[i,:] - features)**2, axis=1))_____no_output_____distance.shape_____no_output_____
</code>
Check the mean pairwise distance $\mathbb{E}[D]$._____no_output_____
<code>
# Mean over the upper triangle since the matrix is symmetric (diagonal excluded)
mean_distance = distance[np.triu_indices(distance.shape[1],1)].mean()
print('Mean Euclidean distance between feature vectors of papers on Neural Networks: {}'.format(mean_distance))Mean Euclidean distance between feature vectors of papers on Neural Networks: 5.696602496555962
</code>
Plot an histogram of the euclidean distances._____no_output_____
<code>
fig,ax = plt.subplots(1,1,figsize=(8, 8))
ax.hist(distance.flatten(), density=True, bins=20, color='salmon', edgecolor='black', linewidth=1);
ax.set_title("Histogram of Euclidean distances between Neural-networks papers")
ax.set_xlabel("Euclidian Distances")
ax.set_ylabel("Frequency")
ax.grid(True, which='major', axis='y')
ax.set_axisbelow(True)
plt.show()_____no_output_____
</code>
Now create an adjacency matrix for the papers by thresholding the Euclidean distance matrix.
The resulting (unweighted) adjacency matrix should have entries
$$ A_{ij} = \begin{cases} 1, \; \text{if} \; d(i,j)< \mathbb{E}[D], \; i \neq j, \\ 0, \; \text{otherwise.} \end{cases} $$
First, let us choose the mean distance as the threshold._____no_output_____
<code>
threshold = mean_distance
A_feature = np.where(distance < threshold, 1, 0)
np.fill_diagonal(A_feature,0)_____no_output_____
</code>
Now read the `cora.cites` file and construct the citation graph by converting the given citation connections into an adjacency matrix._____no_output_____
<code>
cora_cites = np.genfromtxt('data/cora/cora.cites', delimiter='\t')_____no_output_____papers = np.unique(cora_cites)
A_citation = np.zeros([papers.size, papers.size])
for i in range(cora_cites.shape[0]):
A_citation[np.where(papers==cora_cites[i,1]),np.where(papers==cora_cites[i,0])] = 1
A_citation.shape_____no_output_____
</code>
Get the adjacency matrix of the citation graph for the field that you chose.
You have to appropriately reduce the adjacency matrix of the citation graph._____no_output_____
<code>
# get the paper id from the chosen field
field_id = pd_content[pd_content['class_label'] == my_field]["paper_id"].unique()
# get the index of those paper in the A_citation matrix (similar to index on the vector 'papers')
field_citation_id = np.empty(field_id.shape[0]).astype(int)
for i in range(field_id.shape[0]):
field_citation_id[i] = np.where(papers == field_id[i])[0]
# get the A_citation matrix only at the index of the paper in the field
A_citation = A_citation[field_citation_id][:,field_citation_id]
A_citation.shape_____no_output_____
</code>
Check if your adjacency matrix is symmetric. Symmetrize your final adjacency matrix if it's not already symmetric._____no_output_____
<code>
# a matrix is symmetric if it is equal to its transpose
print('The citation adjacency matrix for papers on Neural Networks is symmetric: {}'.format(np.all(A_citation == A_citation.transpose())))
# symmetrize it by taking the element-wise maximum of A and its transpose
A_citation = np.maximum(A_citation, A_citation.transpose())
# To verify if the matrix is symetric
print('After modifiying the matrix, it is symmetric: {}'.format(np.count_nonzero(A_citation - A_citation.transpose())==0))The citation adjency matrix for papers on Neural Networks is symmetric: False
After modifiying the matrix, it is symmetric: True
</code>
Check the shape of your adjacency matrix again._____no_output_____
<code>
A_citation.shape_____no_output_____
</code>
### Question 2: Degree Distribution and Moments_____no_output_____What is the total number of edges in each graph?_____no_output_____
<code>
num_edges_feature = int(np.sum(A_feature)/2) # only half of the matrix
num_edges_citation = int(np.sum(A_citation)/2)
print(f"Number of edges in the feature graph: {num_edges_feature}")
print(f"Number of edges in the citation graph: {num_edges_citation}")Number of edges in the feature graph: 136771
Number of edges in the citation graph: 1175
</code>
Plot the degree distribution histogram for each of the graphs._____no_output_____
<code>
degrees_citation = A_citation.sum(axis=1) # degree = nbr of connections --> sum of ones over columns (axis=1)
degrees_feature = A_feature.sum(axis=1)
deg_hist_normalization = np.ones(degrees_citation.shape[0]) / degrees_citation.shape[0]
fig, axes = plt.subplots(1, 2, figsize=(16, 8))
axes[0].set_title('Citation graph degree distribution')
axes[0].hist(degrees_citation, weights=deg_hist_normalization, bins=20, color='salmon', edgecolor='black', linewidth=1);
axes[1].set_title('Feature graph degree distribution')
axes[1].hist(degrees_feature, weights=deg_hist_normalization, bins=20, color='salmon', edgecolor='black', linewidth=1);_____no_output_____
</code>
Calculate the first and second moments of the degree distribution of each graph._____no_output_____
<code>
cit_moment_1 = np.mean(degrees_citation)
cit_moment_2 = np.var(degrees_citation)
feat_moment_1 = np.mean(degrees_feature)
feat_moment_2 = np.var(degrees_feature)
print(f"1st moment of citation graph: {cit_moment_1:.3f}")
print(f"2nd moment of citation graph: {cit_moment_2:.3f}")
print(f"1st moment of feature graph: {feat_moment_1:.3f}")
print(f"2nd moment of feature graph: {feat_moment_2:.3f}")1st moment of citation graph: 2.873
2nd moment of citation graph: 15.512
1st moment of feature graph: 334.403
2nd moment of feature graph: 55375.549
</code>
What information do the moments provide you about the graphs?
Explain the differences in moments between graphs by comparing their degree distributions._____no_output_____### Answer :
**<br>The moments give a numerical sense of how sparse the graphs are and how the degrees are distributed. The first moment is the average degree, and the second (computed here as the variance) measures the spread of the degrees around that average. A large first moment means a large number of edges per node on average, whereas the second moment tells us how spread out the node degrees are around the average value.
<br> The citation degree distribution has a first moment of about 2.9 and a larger second moment (about 15.5) over a large number of nodes (818). This means that most nodes have a small degree while a few larger hubs exist, so the network is likely to be sparse. The moments of the feature degree distribution are much larger, indicating a rather dense graph. Many nodes (about 15%) have a degree above 800, and since the network contains 818 nodes, those nodes are almost saturated. The high variance shows that the degree distribution is more spread out around its average value than for the citation graph.**_____no_output_____Select the 20 largest hubs for each of the graphs and remove them. Observe the sparsity pattern of the adjacency matrices of the citation and feature graphs before and after such a reduction._____no_output_____
<code>
smallest_feat_hub_idx = np.argpartition(degrees_feature, degrees_feature.shape[0]-20)[:-20]
smallest_feat_hub_idx.sort()
reduced_A_feature = A_feature[smallest_feat_hub_idx][:,smallest_feat_hub_idx]
smallest_cit_hub_idx = np.argpartition(degrees_citation, degrees_citation.shape[0]-20)[:-20]
smallest_cit_hub_idx.sort()
reduced_A_citation = A_citation[smallest_cit_hub_idx][:,smallest_cit_hub_idx]
fig, axes = plt.subplots(2, 2, figsize=(16, 16))
axes[0, 0].set_title('Feature graph: adjacency matrix sparsity pattern')
axes[0, 0].spy(A_feature);
axes[0, 1].set_title('Feature graph without top 20 hubs: adjacency matrix sparsity pattern')
axes[0, 1].spy(reduced_A_feature);
axes[1, 0].set_title('Citation graph: adjacency matrix sparsity pattern')
axes[1, 0].spy(A_citation);
axes[1, 1].set_title('Citation graph without top 20 hubs: adjacency matrix sparsity pattern')
axes[1, 1].spy(reduced_A_citation);_____no_output_____
</code>
Plot the new degree distribution histograms._____no_output_____
<code>
reduced_degrees_feat = reduced_A_feature.sum(axis=1)
reduced_degrees_cit = reduced_A_citation.sum(axis=1)
deg_hist_normalization = np.ones(reduced_degrees_feat.shape[0])/reduced_degrees_feat.shape[0]
fig, axes = plt.subplots(1, 2, figsize=(16, 8))
axes[0].set_title('Citation graph degree distribution')
axes[0].hist(reduced_degrees_cit, weights=deg_hist_normalization, bins=8, color='salmon', edgecolor='black', linewidth=1);
axes[1].set_title('Feature graph degree distribution')
axes[1].hist(reduced_degrees_feat, weights=deg_hist_normalization, bins=20, color='salmon', edgecolor='black', linewidth=1);_____no_output_____
</code>
Compute the first and second moments for the new graphs._____no_output_____
<code>
reduced_cit_moment_1 = np.mean(reduced_degrees_cit)
reduced_cit_moment_2 = np.var(reduced_degrees_cit)
reduced_feat_moment_1 = np.mean(reduced_degrees_feat)
reduced_feat_moment_2 = np.var(reduced_degrees_feat)
print(f"Citation graph first moment: {reduced_cit_moment_1:.3f}")
print(f"Citation graph second moment: {reduced_cit_moment_2:.3f}")
print(f"Feature graph first moment: {reduced_feat_moment_1:.3f}")
print(f"Feature graph second moment: {reduced_feat_moment_2:.3f}")Citation graph first moment: 1.972
Citation graph second moment: 2.380
Feature graph first moment: 302.308
Feature graph second moment: 50780.035
</code>
Print the number of edges in the reduced graphs._____no_output_____
<code>
num_edges_reduced_feature = int(np.sum(reduced_A_feature)/2)
num_edges_reduced_citation = int(np.sum(reduced_A_citation)/2)
print(f"Number of edges in the reduced feature graph: {num_edges_reduced_feature}")
print(f"Number of edges in the reduced citation graph: {num_edges_reduced_citation}")Number of edges in the reduced feature graph: 120621
Number of edges in the reduced citation graph: 787
</code>
Is the effect of removing the hubs the same for both networks? Look at the percentage changes for each moment. Which of the moments is affected the most and in which graph? Explain why.
**Hint:** Examine the degree distributions._____no_output_____
<code>
change_cit_moment_1 = (reduced_cit_moment_1-cit_moment_1)/cit_moment_1
change_cit_moment_2 = (reduced_cit_moment_2-cit_moment_2)/cit_moment_2
change_feat_moment_1 = (reduced_feat_moment_1-feat_moment_1)/feat_moment_1
change_feat_moment_2 = (reduced_feat_moment_2-feat_moment_2)/feat_moment_2
print(f"% Percentage of change for citation 1st moment: {change_cit_moment_1*100:.3f}")
print(f"% Percentage of change for citation 2nd moment: {change_cit_moment_2*100:.3f}")
print(f"% Percentage of change for feature 1st moment: {change_feat_moment_1*100:.3f}")
print(f"% Percentage of change for feature 2nd moment: {change_feat_moment_2*100:.3f}")% Percentage of change for citation 1st moment: -31.343
% Percentage of change for citation 2nd moment: -84.656
% Percentage of change for feature 1st moment: -9.598
% Percentage of change for feature 2nd moment: -8.299
</code>
### Answer :
**Looking at the percentage change of the moments, we notice that removing the 20 largest hubs affects the citation degree distribution far more than the feature degree distribution. The second moment of the citation degree distribution is reduced by almost 85%. This is because the proportion of high-degree nodes was much lower in the citation network than in the feature network, so the high-degree nodes were essentially all removed as part of the 20 largest hubs, resulting in a much lower variance (a less spread-out distribution); the quick check below makes this concrete.**
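A quick way to see this numerically is to compute how much of the total degree the 20 largest hubs hold in each graph; this is a small sketch that reuses the `degrees_citation` and `degrees_feature` arrays computed earlier.
<code>
# share of the total degree held by the 20 largest hubs in each graph
top20_cit_share = np.sort(degrees_citation)[-20:].sum() / degrees_citation.sum()
top20_feat_share = np.sort(degrees_feature)[-20:].sum() / degrees_feature.sum()
print(f"Top-20 hubs hold {top20_cit_share:.1%} of the total degree in the citation graph")
print(f"Top-20 hubs hold {top20_feat_share:.1%} of the total degree in the feature graph")
</code>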
**In conclusion, the new citation distribution is more concentrated around its mean value, so its degree landscape is more uniform. In the feature degree distribution, by contrast, a substantial number of nodes remain hubs.**_____no_output_____### Question 3: Pruning, sparsity, paths_____no_output_____By adjusting the threshold of the Euclidean distance matrix, prune the feature graph so that its number of edges is roughly close (within a hundred edges) to the number of edges in the citation graph._____no_output_____
<code>
threshold = np.max(distance)
diagonal = distance.shape[0]
threshold_flag = False
epsilon = 0.01*threshold
tolerance = 250
while threshold > 0 and not threshold_flag:
threshold -= epsilon # steps of 1% of maximum
n_edge = int((np.count_nonzero(np.where(distance < threshold, 1, 0)) - diagonal)/2)
    # within the chosen tolerance (a strict hundred-edge window was not achievable here; see the remark below)
if abs(num_edges_citation - n_edge) < tolerance:
threshold_flag = True
print(f'Found a threshold : {threshold:.3f}')
A_feature_pruned = np.where(distance < threshold, 1, 0)
np.fill_diagonal(A_feature_pruned, 0)
num_edges_feature_pruned = int(np.count_nonzero(A_feature_pruned)/2)
print(f"Number of edges in the feature graph: {num_edges_feature}")
print(f"Number of edges in the feature graph after pruning: {num_edges_feature_pruned}")
print(f"Number of edges in the citation graph: {num_edges_citation}")Found a threshold : 2.957
Number of edges in the feature graph: 136771
Number of edges in the feature graph after pruning: 1386
Number of edges in the citation graph: 1175
</code>
### Remark:
**The distribution of distances for this particular field (Neural Networks), which only takes a discrete set of values because the feature vectors are binary, does not allow a configuration where the number of edges is roughly close (within a hundred edges) to that of the citation graph. This is independent of the chosen epsilon. The closest match is about 250 edges apart.**_____no_output_____Check your results by comparing the sparsity patterns and total number of edges between the graphs._____no_output_____
<code>
fig, axes = plt.subplots(1, 2, figsize=(12, 6))
axes[0].set_title('Citation graph sparsity')
axes[0].spy(A_citation);
axes[1].set_title('Feature graph sparsity')
axes[1].spy(A_feature_pruned);_____no_output_____
</code>
Let $C_{k}(i,j)$ denote the number of paths of length $k$ from node $i$ to node $j$.
We define the path matrix $P$, with entries
$ P_{ij} = \displaystyle\sum_{k=0}^{N}C_{k}(i,j). $_____no_output_____Calculate the path matrices for both the citation and the unpruned feature graphs for $N =10$.
**Hint:** Use [powers of the adjacency matrix](https://en.wikipedia.org/wiki/Adjacency_matrix#Matrix_powers)._____no_output_____
<code>
def path_matrix(A, N=10):
    """Compute the path matrix of adjacency matrix A by summing the powers A^1 ... A^N.
    (The k=0 term of the definition is the identity matrix and would only add 1s on the diagonal.)"""
    power_A = [A]
    for i in range(N-1):
        power_A.append(np.matmul(power_A[-1], A))  # A^(i+2): counts of paths of length i+2
    return np.stack(power_A, axis=2).sum(axis=2)_____no_output_____path_matrix_citation = path_matrix(A_citation)
path_matrix_feature = path_matrix(A_feature)_____no_output_____
</code>
Check the sparsity pattern for both of path matrices._____no_output_____
<code>
fig, axes = plt.subplots(1, 2, figsize=(16, 9))
axes[0].set_title('Citation Path matrix sparsity')
axes[0].spy(path_matrix_citation);
axes[1].set_title('Feature Path matrix sparsity')
axes[1].spy(path_matrix_feature, vmin=0, vmax=1); #scaling the color bar_____no_output_____
</code>
Now calculate the path matrix of the pruned feature graph for $N=10$. Plot the corresponding sparsity pattern. Is there any difference?_____no_output_____
<code>
path_matrix_pruned = path_matrix(A_feature_pruned)
plt.figure(figsize=(12, 6))
plt.title('Feature Path matrix sparsity')
plt.spy(path_matrix_pruned);_____no_output_____
</code>
### Your answer here:
<br> **Many pairs of nodes now have a path matrix value of zero, meaning that they cannot reach each other within N = 10 steps. This makes sense, since many edges were removed in the pruning procedure (from roughly 136,000 to 1,400). Hence, the number of possible paths between i and j was reduced, and with it the number of paths of length at most N. Increasing the sparsity of the adjacency matrix increases the diameter of the network.**_____no_output_____Describe how you can use the above process of counting paths to determine whether a graph is connected or not. Is the original (unpruned) feature graph connected?_____no_output_____### Answer:
<br> **The graph is connected if every node can be reached from every other node. In other words, if by increasing $N$ we reach a point where the path matrix no longer contains any zero entry, the graph is connected. Conversely, a path matrix with some zero entries does not necessarily mean the graph is disconnected; it depends on the chosen value of $N$.
<br> For example, if 20 nodes are arranged in a line and linked consecutively, every node is reachable from every other, and yet the number of paths of length at most 10 between the first and the last node is still 0.**_____no_output_____If the graph is connected, how can you guess its diameter using the path matrix?_____no_output_____### Answer :
<br> **The diameter corresponds to the smallest $N$ (a non-negative integer) for which the path matrix contains no zero entries.**_____no_output_____If any of your graphs is connected, calculate the diameter using that process._____no_output_____
<code>
N=0
diameter = None
d_found = False
while not d_found:
N += 1
P = path_matrix(A_feature, N)
if np.count_nonzero(P == 0) == 0: # if there are no zero in P
d_found = True
diameter = N
print(f"The diameter of the feature graph (which is connected) is: {diameter}")The diameter of the feature graph (which is connected) is: 2
</code>
Check if your guess was correct using [NetworkX](https://networkx.github.io/documentation/stable/reference/algorithms/generated/networkx.algorithms.distance_measures.diameter.html).
Note: usage of NetworkX is only allowed in this part of Section 1._____no_output_____
<code>
import networkx as nx
feature_graph = nx.from_numpy_matrix(A_feature)
print(f"Diameter of feature graph according to networkx: {nx.diameter(feature_graph)}")Diameter of feature graph according to networkx: 2
</code>
## Section 2: Network Models_____no_output_____In this section, you will analyze the feature and citation graphs you constructed in the previous section in terms of the network model types.
For this purpose, you can use the NetworkX library imported below._____no_output_____
<code>
import networkx as nx_____no_output_____
</code>
Let us create NetworkX graph objects from the adjacency matrices computed in the previous section._____no_output_____
<code>
G_citation = nx.from_numpy_matrix(A_citation)
print('Number of nodes: {}, Number of edges: {}'. format(G_citation.number_of_nodes(), G_citation.number_of_edges()))
print('Number of self-loops: {}, Number of connected components: {}'. format(G_citation.number_of_selfloops(), nx.number_connected_components(G_citation)))Number of nodes: 818, Number of edges: 1175
Number of self-loops: 0, Number of connected components: 104
</code>
In the rest of this assignment, we will consider the pruned feature graph as the feature network._____no_output_____
<code>
G_feature = nx.from_numpy_matrix(A_feature_pruned)
print('Number of nodes: {}, Number of edges: {}'. format(G_feature.number_of_nodes(), G_feature.number_of_edges()))
print('Number of self-loops: {}, Number of connected components: {}'. format(G_feature.number_of_selfloops(), nx.number_connected_components(G_feature)))Number of nodes: 818, Number of edges: 1386
Number of self-loops: 0, Number of connected components: 684
</code>
### Question 4: Simulation with Erdős–Rényi and Barabási–Albert models_____no_output_____Create an Erdős–Rényi and a Barabási–Albert graph using NetworkX to simulate the citation graph and the feature graph you have. When choosing parameters for the networks, take into account the number of vertices and edges of the original networks._____no_output_____The number of nodes should exactly match the number of nodes in the original citation and feature graphs._____no_output_____
<code>
assert len(G_citation.nodes()) == len(G_feature.nodes())
n = len(G_citation.nodes())
print('The number of nodes ({}) matches the original number of nodes: {}'.format(n,n==A_citation.shape[0]))The number of nodes (818) matches the original number of nodes: True
</code>
The number of edges should match the average of the number of edges in the citation and the feature graph._____no_output_____
<code>
m = np.round((G_citation.size() + G_feature.size()) / 2)
print('The number of edges ({}) fits the average number of edges: {}'.format(m,m==np.round(np.mean([num_edges_citation,num_edges_feature_pruned]))))The number of edges (1280.0) fits the average number of edges: True
</code>
How do you determine the probability parameter for the Erdős–Rényi graph?_____no_output_____### Answer:
**<br>Based on the principles governing random networks (edges are placed at random, with no preferential attachment), the expected number of edges is given by $\langle L \rangle = p\frac{N(N-1)}{2}$, where $\langle L \rangle$ is the average number of edges, $N$ the number of nodes and $p$ the probability parameter.
<br> Therefore we can obtain $p$ from the number of edges we want and the number of nodes we have: $p = \langle L \rangle\frac{2}{N(N-1)}$
<br> The expected number of edges is given by $m$ in our case (defined as the average number of edges of the two original networks), and $N$ is the same as in the original graphs.**_____no_output_____
<code>
p = m*2/(n*(n-1))
G_er = nx.erdos_renyi_graph(n, p)_____no_output_____
</code>
Check the number of edges in the Erdős–Rényi graph._____no_output_____
<code>
print('My Erdos-Rényi network that simulates the citation graph has {} edges.'.format(G_er.size()))My Erdos-Rényi network that simulates the citation graph has 1238 edges.
</code>
How do you determine the preferential attachment parameter for Barabási–Albert graphs?_____no_output_____### Answer :
<br>**The Barabási–Albert model uses growth and preferential attachment to build a scale-free network. The network is constructed by progressively adding nodes and attaching a fixed number of edges, q, to each added node. Those edges are preferentially drawn towards already existing nodes with a high degree (preferential attachment).
<br> By the end of the process, the network contains n nodes and hence approximately $n \cdot q$ edges. Knowing that the target number of edges is $m$, the parameter is $q = m/n$.**_____no_output_____
<code>
q = int(m/n)
G_ba = nx.barabasi_albert_graph(n, q)_____no_output_____
</code>
Check the number of edges in the Barabási–Albert graph._____no_output_____
<code>
print('My Barabási-Albert network that simulates the citation graph has {} edges.'.format(G_ba.size()))My Barabási-Albert network that simulates the citation graph has 817 edges.
</code>
### Question 5: Giant Component_____no_output_____Check the size of the largest connected component in the citation and feature graphs._____no_output_____
<code>
giant_citation = max(nx.connected_component_subgraphs(G_citation), key=len)
print('The giant component of the citation graph has {} nodes and {} edges.'.format(giant_citation.number_of_nodes(), giant_citation.size()))The giant component of the citation graph has 636 nodes and 1079 edges.
giant_feature = max(nx.connected_component_subgraphs(G_feature), key=len)
print('The giant component of the feature graph has {} nodes and {} edges.'.format(giant_feature.number_of_nodes(), giant_feature.size()))The giant component of the feature graph has 117 nodes and 1364 edges.
</code>
Check the size of the giant components in the generated Erdős–Rényi graph._____no_output_____
<code>
giant_er = max(nx.connected_component_subgraphs(G_er), key=len)
print('The giant component of the Erdos-Rényi network has {} nodes and {} edges.'.format(giant_er.number_of_nodes(), giant_er.size()))The giant component of the Erdos-Rényi network has 771 nodes and 1234 edges.
</code>
Let us match the number of nodes in the giant component of the feature graph by simulating a new Erdős–Rényi network.
How do you choose the probability parameter this time?
**Hint:** Recall the expected giant component size from the lectures._____no_output_____### Answer :
**<br> The average degree of a node can be seen as the probability $p$ multiplied by the number of nodes it can connect to ($N-1$, since there are no self-loops, with $N$ the number of nodes): $\langle k \rangle = p\,(N-1)$.
<br> Let $S = \frac{N_{GC}}{N}$ be the fraction of nodes in the giant component ($N_{GC}$ being the number of nodes in the giant component), and let $u$ be the probability that a node $i$ is not linked to the giant component via any other node $j$; $u$ is also the fraction of nodes outside the giant component: $u = 1 - S$.
<br> For a given node $j$ among the other $N-1$ nodes, node $i$ fails to join the giant component through $j$ either because it has no link to $j$ (probability $1-p$) or because it is linked to $j$ but $j$ itself is not in the giant component (probability $p\,u$). Hence $u = (1 - p + p\,u)^{N-1}$.
<br> Using the relationship mentioned above, $p = \frac{\langle k \rangle}{N-1}$, taking the logarithm of both sides and keeping the first-order term for large $N$ gives:
<br>$S = 1-e^{-\langle k \rangle S}$
<br> => $e^{-\langle k \rangle S} = 1-S$
<br> => $-\langle k \rangle S = \ln(1-S)$
<br> => $\langle k \rangle = -\frac{1}{S}\ln(1-S)$
<br> This expression of the average degree is then used to define $p$: $p = \frac{\langle k \rangle}{N-1} = \frac{-\frac{1}{S}\ln(1-S)}{N-1}$**_____no_output_____
<code>
GC_node = giant_feature.number_of_nodes()
S = GC_node/n
avg_k = -1/S*np.log(1-S)_____no_output_____p_new = avg_k/(n-1)
G_er_new = nx.erdos_renyi_graph(n, p_new)_____no_output_____
</code>
Check the size of the new Erdős–Rényi network and its giant component._____no_output_____
<code>
print('My new Erdos-Rényi network that simulates the feature graph has {} edges.'.format(G_er_new.size()))
giant_er_new = max(nx.connected_component_subgraphs(G_er_new), key=len)
print('The giant component of the new Erdos-Rényi network has {} nodes and {} edges.'.format(giant_er_new.number_of_nodes(), giant_er_new.size()))My new Erdos-Rényi network that simulates the feature graph has 437 edges.
The giant component of the new Erdos-Rényi network has 208 nodes and 210 edges.
</code>
### Question 6: Degree Distributions_____no_output_____Recall the degree distribution of the citation and the feature graph._____no_output_____
<code>
fig, axes = plt.subplots(1, 2, figsize=(15, 6),sharex = True)
axes[0].set_title('Citation graph')
citation_degrees = [deg for (node, deg) in G_citation.degree()]
axes[0].hist(citation_degrees, bins=20, color='salmon', edgecolor='black', linewidth=1);
axes[1].set_title('Feature graph')
feature_degrees = [deg for (node, deg) in G_feature.degree()]
axes[1].hist(feature_degrees, bins=20, color='salmon', edgecolor='black', linewidth=1);_____no_output_____
</code>
What does the degree distribution tell us about a network? Can you make a prediction on the network model type of the citation and the feature graph by looking at their degree distributions?_____no_output_____### Answer :
<br> **The degree distribution tells us about the sparsity of a network.
Both graphs show a power-law-like degree distribution (many nodes with few edges but a few large hubs with many edges). Hence they should fall in the scale-free network category, which has a similar degree distribution. Therefore the Barabási–Albert model, which is a random scale-free model, is probably the best match (see the quick log–log check below).**
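A quick visual check of this claim is to plot the degree counts on a log–log scale, where a power law decays roughly linearly. This is a hedged sketch reusing the `citation_degrees` and `feature_degrees` lists from the cell above.
<code>
# degree counts on a log-log scale (zero-degree nodes are dropped for the log axes)
fig, axes = plt.subplots(1, 2, figsize=(15, 6))
for ax, degs, name in zip(axes, [citation_degrees, feature_degrees], ['Citation', 'Feature']):
    values, counts = np.unique([d for d in degs if d > 0], return_counts=True)
    ax.loglog(values, counts, 'o', color='salmon', markeredgecolor='black')
    ax.set_title(f'{name} graph degree counts (log-log)')
    ax.set_xlabel('degree')
    ax.set_ylabel('count')
plt.show()
</code>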
**<br> Those distributions do indeed look like power laws, as can be seen from the roughly linear decay of the degree counts on a log–log scale.**_____no_output_____Now, plot the degree distribution histograms for the simulated networks._____no_output_____
<code>
fig, axes = plt.subplots(1, 3, figsize=(20, 8))
axes[0].set_title('Erdos-Rényi network')
er_degrees = [deg for (node, deg) in G_er.degree()]
axes[0].hist(er_degrees, bins=10, color='salmon', edgecolor='black', linewidth=1)
axes[1].set_title('Barabási-Albert network')
ba_degrees = [deg for (node, deg) in G_ba.degree()]
axes[1].hist(ba_degrees, bins=10, color='salmon', edgecolor='black', linewidth=1)
axes[2].set_title('new Erdos-Rényi network')
er_new_degrees = [deg for (node, deg) in G_er_new.degree()]
axes[2].hist(er_new_degrees, bins=6, color='salmon', edgecolor='black', linewidth=1)
plt.show()_____no_output_____
</code>
In terms of the degree distribution, is there a good match between the citation and feature graphs and the simulated networks?
For the citation graph, choose one of the simulated networks above that match its degree distribution best. Indicate your preference below._____no_output_____### Answer :
<br> **Regarding the feature network, none of the distributions above matches its range of degrees, and none of them models the large share of high-degree nodes seen in the feature graph. <br>Regarding the citation network, the Barabási–Albert network seems to be a good match: the range of values as well as the power-law shape of the model is close to the distribution of the citation graph shown earlier. Hence, a scale-free model seems to be the best match for the citation network of the Neural Networks field.**_____no_output_____You can also simulate a network using the configuration model to match its degree distribution exactly. Refer to [Configuration model](https://networkx.github.io/documentation/stable/reference/generated/networkx.generators.degree_seq.configuration_model.html#networkx.generators.degree_seq.configuration_model).
Let us create another network to match the degree distribution of the feature graph. _____no_output_____
<code>
G_config = nx.configuration_model(feature_degrees)
print('Configuration model has {} nodes and {} edges.'.format(G_config.number_of_nodes(), G_config.size()))Configuration model has 818 nodes and 1386 edges.
</code>
Does it mean that we create the same graph with the feature graph by the configuration model? If not, how do you understand that they are not the same?_____no_output_____### Answer :
<br> **No, we do not create the same graph: the numbers of nodes and edges and the degree distribution are the same, but the links themselves can differ. For example, in a group of three papers, several different configurations are possible using only 2 links.**
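In addition, the configuration model returns a multigraph. The sketch below (assuming the `G_config` object built above and the same, older NetworkX API used elsewhere in this notebook) counts the self-loops and parallel edges it typically introduces.
<code>
# the configuration model produces a multigraph: count self-loops and collapsed parallel edges
n_self_loops = G_config.number_of_selfloops()  # same (older) NetworkX API as used above
n_parallel = G_config.number_of_edges() - nx.Graph(G_config).number_of_edges()
print('Self-loops: {}, edges lost when collapsing parallel edges: {}'.format(n_self_loops, n_parallel))
</code>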
<br> **Moreover, the function used to create this model allows self-loops and parallel edges, which is not the case for the real feature graph. Hence the network produced by this model will most likely not be identical to the original graph.**_____no_output_____### Question 7: Clustering Coefficient_____no_output_____Let us check the average clustering coefficient of the original citation and feature graphs. _____no_output_____
<code>
nx.average_clustering(G_citation)_____no_output_____nx.average_clustering(G_feature)_____no_output_____
</code>
What does the clustering coefficient tell us about a network? Comment on the values you obtain for the citation and feature graph._____no_output_____### Answer :
**<br>The clustering coefficient is linked to the presence of subgroups (or clusters) in the network. A high clustering coefficient means that the neighbours of a node are very likely to be connected to each other, i.e. that nodes tend to belong to subgroups. Here we observe that the clustering coefficient of the citation graph is higher (almost double) than that of the feature graph, which suggests that citations are more likely to form subgroups than features.**
_____no_output_____Now, let us check the average clustering coefficient for the simulated networks._____no_output_____
<code>
nx.average_clustering(G_er)_____no_output_____nx.average_clustering(G_ba)_____no_output_____nx.average_clustering(nx.Graph(G_config))_____no_output_____
</code>
Comment on the values you obtain for the simulated networks. Is there any good match to the citation or feature graph in terms of clustering coefficient?_____no_output_____### Answer :
<br> **No, there is no good match. The clustering coefficients are rather small compared to those of the feature and citation graphs. Random networks generally have small clustering coefficients because, with random pairing, they do not tend to form subgroups.**_____no_output_____Check the other [network model generators](https://networkx.github.io/documentation/networkx-1.10/reference/generators.html) provided by NetworkX. Which one do you predict to have a better match to the citation graph or the feature graph in terms of degree distribution and clustering coefficient at the same time? Justify your answer._____no_output_____### Answer :
<br> **Based on the course notes on the Watts–Strogatz model, which is an extension of the random network model that produces small-world properties and high clustering, we tested the `watts_strogatz_graph` function provided by NetworkX (see the sketch below). We used the average degree ($k = 2m/n$) as an initial guess for the number of nearest neighbours to which each node is connected, and then varied the rewiring probability to look for a good match. The results did not show any satisfying match for the clustering coefficient (it was always rather low compared to the original networks). We then tuned the parameter $k$ by increasing it for a fixed $p$ of 0.5 (corresponding to the small-world regime); $k$ was originally very low and we wanted to increase the occurrence of clusters. At $k = 100$ the clustering coefficient matched our expectations (being close to the clustering coefficients of the two original graphs), but the degree distribution no longer matched a power law. In conclusion, Watts–Strogatz was set aside, as no combination of parameters matched both the clustering coefficient and the shape of the distribution.
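A minimal sketch of the Watts–Strogatz exploration described above is given below; the parameter values are illustrative, not the exact grid that was tested.
<code>
# illustrative Watts-Strogatz sweep: edge count and clustering coefficient for a few (k, p) pairs
for k_ws in [3, 10, 100]:
    for p_ws in [0.1, 0.5]:
        G_ws = nx.watts_strogatz_graph(n, k_ws, p_ws)
        print('k={:>3}, p={}: {} edges, average clustering {:.3f}'.format(
            k_ws, p_ws, G_ws.size(), nx.average_clustering(G_ws)))
</code>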
<br>After scrolling through the NetworkX documentation, we came across the `powerlaw_cluster_graph` function. According to the documentation, the parameter n is the number of nodes (n = 818 in our case). The second parameter, k, is the _number of random edges to add to each new node_, which we chose to be the average degree of the original graphs as an initial guess. The parameter p, the probability of connecting two nodes that already share a common neighbour (forming a triangle), was initially chosen as the average of the average clustering coefficients of the original graphs. This yielded a clustering coefficient that was a bit low compared with our expectations, so we tuned this parameter to better match the coefficients; a good compromise was reached at p = 0.27.** _____no_output_____If you find a better fit, create a graph object below for that network model. Print the number of edges and the average clustering coefficient. Plot the histogram of the degree distribution._____no_output_____
<code>
k = m*2/n
p = (nx.average_clustering(G_citation) + nx.average_clustering(G_feature))*0.8
G_pwc = nx.powerlaw_cluster_graph(n, int(k), p)
print('Power law cluster model has {} edges.'.format(G_pwc.size()))
print('Power law cluster model has a clustering coefficient of {}'.format(nx.average_clustering(G_pwc)))
print('Citation model has {} edges.'.format(G_citation.size()))
print('Citation model has a clustering coefficient of {}'.format(nx.average_clustering(G_citation)))
print('Feature model has {} edges.'.format(G_feature.size()))
print('Feature model has a clustering coefficient of {}'.format(nx.average_clustering(G_feature)))
fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True, sharex=True)
axs[0].set_title('PWC graph')
ws_degrees = [deg for (node, deg) in G_pwc.degree()]
axs[0].hist(ws_degrees, bins=20, color='salmon', edgecolor='black', linewidth=1)
axs[1].set_title('Citation graph')
citation_degrees = [deg for (node, deg) in G_citation.degree()]
axs[1].hist(citation_degrees, bins=20, color='salmon', edgecolor='black', linewidth=1)
axs[2].set_title('Feature graph')
feature_degree = [deg for (node, deg) in G_feature.degree()]
axs[2].hist(feature_degree, bins=20, color='salmon', edgecolor='black', linewidth=1)
plt.show()Power law cluster model has 2440 edges.
Power law cluster model has a clustering coefficient of 0.16384898371644135
Citation model has 1175 edges.
Citation model has a clustering coefficient of 0.21693567980632222
Feature model has 1386 edges.
Feature model has a clustering coefficient of 0.1220744470334593
</code>
Comment on the similarities of your match._____no_output_____### Answer :
<br> **At this point, the decay of the power-law-cluster (PWC) degree distribution shows an intermediate behavior between the citation and the feature graphs, and its clustering coefficient (~0.17) falls in between those of the original graphs. The degree range is roughly equivalent in all three distributions above.
<br> We have therefore found a model that is satisfactory for both the clustering coefficient and the degree distribution. The final model also produces a distribution with the expected intermediate behavior compared with the original distributions.**_____no_output_____
| {
"repository": "carparel/NTDS",
"path": "Assignments/1_network_science.ipynb",
"matched_keywords": [
"Salmon"
],
"stars": null,
"size": 371922,
"hexsha": "d0928c376aad98853888a51491be9369822ba32b",
"max_line_length": 127916,
"avg_line_length": 179.1531791908,
"alphanum_fraction": 0.8946472647
} |
# Notebook from acadena-repo/MACHINE-LEARNING-TOP-ALGORITHMS-SERIES
Path: ML SERIES - NAIVE BAYES/MACHINE LEARNING TOP ALGORITHMS - NAIVE BAYES CLASSIFIER.ipynb
# BAYES CLASSIFIERS
For any classifier $f:{X \to Y}$, its prediction error is:
$P(f(X) \ne Y) = \mathbb{E}[ \mathbb{1}(f(X) \ne Y)] = \mathbb{E}[\mathbb{E}[ \mathbb{1}(f(X) \ne Y)|X]]$
For each $x \in X$,
$$\mathbb{E}[ \mathbb{1}(f(X) \ne Y)|X = x] = \sum\limits_{y \in Y} P(Y = y|X = x) \cdot \mathbb{1}(f(x) \ne y)$$
The above quantity is minimized for this particular $x \in X$ when,
$$f(x) = \underset{y \in Y}{argmax} \space P(Y = y|X = x) \space \star$$
A classifier $f$ with property $ \star$ for all $x \in X$ is called the `Bayes Classifier`
_____no_output_____Under the assumption $(X,Y) \overset{iid}{\sim} P$, the optimal classifier is:
$$f^{\star}(x) = \underset{y \in Y}{argmax} \space P(Y = y|X = x)$$
And from _Bayes Rule_ we equivalently have:
$$f^{\star}(x) = \underset{y \in Y}{argmax} \space P(Y = y) \space P(X = x|Y = y)$$
Where
- $P(Y =y)$ is called _the class prior_
- $P(X = x|Y= y)$ is called _the class conditional distribution_ of $X$
Assuming $X = \mathbb{R}$, $Y = \{ 0,1 \}$, and the distribution $P$ of $(X,Y)$ is as follows:
- _Class prior_: $P(Y = y) = \pi_y, y \in \{ 0,1 \}$
- _Class conditional density_ for class $y \in \{ 0,1 \}: p_y (x) = N(x|\mu_y,\sigma^2_y)$
$$f^{\star}(x) = \underset{y \in \{ 0,1 \}}{argmax} \space P(Y = y) \space P(X = x|Y = y) =
\begin{cases}
1 & \text{if} \space \frac{\pi_1}{\sigma_1}\space exp[- \frac{(x - \mu_1)^2}{2 \sigma^2_1}] > \frac{\pi_0}{\sigma_0}\space exp[- \frac{(x - \mu_0)^2}{2 \sigma^2_0}]\\
0 & \text{otherwise}
\end{cases}$$_____no_output_____### _Bayes Classifier_
_____no_output_____The `Bayes Classifier` has the smallest prediction error of all classifiers. The problem is that we need to know the distribution of $P$ in order to construct the `Bayes Classifier`_____no_output_____# NAIVE BAYES CLASSIFIER
A simplifying assumption is that the feature values are conditionally independent given the label; then the probability of observing the conjunction $x_1, x_2, x_3, ..., x_d$ is the product of the probabilities of the individual features:
$$ p(x_1, x_2, x_3, ..., x_d|y) = \prod \limits_j \space p(x_j|y)$$
Then the `Naive Bayes Classifier` is defined as:
$$f^{\star}(x) = \underset{y \in Y}{argmax} \space p(y) \space \prod \limits_j \space p(x_j|y)$$
We can estimate these two terms based on the **frequency counts** in the dataset. If the features are real-valued, Naive Bayes can be extended by assuming that the features follow a Gaussian distribution. This extension is called `Gaussian Naive Bayes`. Other functions can be used to estimate the distribution, but the Gaussian distribution is the easiest to work with because we only need to estimate the mean and the standard deviation from the dataset.
Ok, let's start with the implementation of `Gaussian Naive Bayes` from scratch._____no_output_____
<code>
##IMPORTING ALL NECESSARY SUPPORT LIBRARIES
import math as mt
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline_____no_output_____def separate_by_label(dataset):
separate = dict()
for i in range(len(dataset)):
row = dataset[i]
label = row[-1]
if (label not in separate):
separate[label] = list()
separate[label].append(row)
return separate_____no_output_____def mean(list_num):
return sum(list_num)/len(list_num)_____no_output_____def stdv(list_num):
mu = mean(list_num)
var = sum([(x - mu)**2 for x in list_num])/(len(list_num) - 1)
return mt.sqrt(var)_____no_output_____def stats_per_feature(ds):
'''
argument:
    > ds: list of rows for one class (the label is in the last column)
    returns:
    > stats: list of (mean, stdev, count) tuples, one per feature (the label column is dropped)
'''
stats = [(mean(col), stdv(col), len(col)) for col in zip(*ds)]
del(stats[-1])
return stats_____no_output_____def summary_by_class(dataset):
sep_label = separate_by_label(dataset)
summary = dict()
for label, rows in sep_label.items():
summary[label] = stats_per_feature(rows)
return summary_____no_output_____def gaussian_pdf(mean, stdv, x):
_exp = mt.exp(-1*((x - mean)**2/(2*stdv**2)))
return (1/(mt.sqrt(2 * mt.pi)*stdv)) * _exp_____no_output_____
</code>
Now it is time to use the statistics calculated from the data to calculate probabilities for new data.
Probabilities are calculated separately for each class, so we calculate the probability that a new piece of data belongs to the first class, then calculate the probability that it belongs to the second class, and so on for all the classes.
For example, if we have two inputs $x_1$ and $x_2$, the (unnormalized) score for the hypothesis that they belong to class = _y_ is:
$$P(class = y|x_1,x_2) \propto P(x_1|class = y) \cdot P(x_2|class = y) \cdot P(class = y)$$_____no_output_____
<code>
def class_probabilities(summary, row):
total = sum([summary[label][0][2] for label in summary])
probabilities = dict()
for class_, class_summary in summary.items():
probabilities[class_] = summary[class_][0][2]/total
for i in range(len(class_summary)):
mean, stdev, count = class_summary[i]
probabilities[class_] *= gaussian_pdf(row[i], mean, stdev)
return probabilities_____no_output_____def predict(summary, row):
cls_prob = class_probabilities(summary, row)
_label, _prob = None, -1.0
for class_, probability in cls_prob.items():
if _label is None or probability > _prob:
_prob = probability
_label = class_
return _label _____no_output_____
</code>
In order to verify proper implementation a **toy dataset** is used to evaluate the algorithm._____no_output_____
<code>
dataset = [[3.393533211,2.331273381,0],
[3.110073483,1.781539638,0],
[1.343808831,3.368360954,0],
[3.582294042,4.67917911,0],
[2.280362439,2.866990263,0],
[7.423436942,4.696522875,1],
[5.745051997,3.533989803,1],
[9.172168622,2.511101045,1],
[7.792783481,3.424088941,1],
[7.939820817,0.791637231,1]]
summaries = summary_by_class(dataset)
for row in dataset:
y_pred = predict(summaries, row)
y_real = row[-1]
print("Expected={0}, Predicted={1}".format(y_real, y_pred))Expected=0, Predicted=0
Expected=0, Predicted=0
Expected=0, Predicted=0
Expected=0, Predicted=0
Expected=0, Predicted=0
Expected=1, Predicted=1
Expected=1, Predicted=1
Expected=1, Predicted=1
Expected=1, Predicted=1
Expected=1, Predicted=1
</code>
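As a quick follow-up, we can also inspect the raw per-class scores behind one of these predictions; this is a small sketch reusing the `summaries` and `dataset` objects above (the values are unnormalized joint densities, not normalized probabilities).
<code>
# per-class scores for the first toy row
scores = class_probabilities(summaries, dataset[0])
print(scores)
</code>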
# _GAUSSIAN NAIVE BAYES APPLICATION_
From the `UCI Machine Learning Repository`, which hosts the Iris dataset, we will train our `Gaussian Naive Bayes` model. The Iris dataset is perhaps the best-known database in the pattern recognition literature. Fisher's paper is a classic in the field and is referenced frequently to this day.
The dataset contains 3 classes of 50 instances each, where each class refers to a type of iris plant. One class is linearly separable from the other 2; the latter are not linearly separable from each other.
The dataset has 150 instances and the following attributes:
1. sepal length in cm
2. sepal width in cm
3. petal length in cm
4. petal width in cm
5. class:
-- Iris Setosa
-- Iris Versicolour
-- Iris Virginica_____no_output_____To compare the performance of our _Classifier_ on the **Iris** dataset, a Gaussian Naive Bayes model from `sklearn` will be fit on the same data and a classification report for both models is generated._____no_output_____
<code>
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import classification_report_____no_output_____##LOADING 'IRIS' DATASET
columns = ['sepal-len','sepal-wid','petal-len','petal-wid','class']
df = pd.read_csv('./data/Iris.csv', names = columns)
df.head()_____no_output_____df.info()<class 'pandas.core.frame.DataFrame'>
RangeIndex: 150 entries, 0 to 149
Data columns (total 5 columns):
sepal-len 150 non-null float64
sepal-wid 150 non-null float64
petal-len 150 non-null float64
petal-wid 150 non-null float64
class 150 non-null object
dtypes: float64(4), object(1)
memory usage: 5.9+ KB
</code>
Because the class variable is `categorical`, we first need to encode it as a numeric type in order to feed it into our models._____no_output_____
<code>
def encoder(df, class_value_pair):
for class_name, value in class_value_pair.items():
df['class'] = df['class'].replace(class_name, value)
return df
class_encoder = {'Iris-setosa':0, 'Iris-versicolor':1, 'Iris-virginica':2}
df = encoder(df, class_encoder)
df.head()_____no_output_____df['class'].value_counts().sort_index()_____no_output_____
</code>
Once the preprocessing is complete the dataset will be split into a `Training` & `Test` dataset._____no_output_____
<code>
X_ = df.drop(['class'],axis = 1)
y = df['class']
X_train, X_test, y_train, y_test = train_test_split(X_, y, test_size = 0.30, random_state = 5)_____no_output_____
</code>
Now we can `train` our customized model. Notice that our _Gaussian Naive Bayes_ model expects a complete dataset (attributes and labels) in order to calculate the summaries._____no_output_____
<code>
ds_train = pd.concat([X_train, y_train], axis = 1)
GNB_custom = summary_by_class(ds_train.values.tolist())_____no_output_____ds_test = pd.concat([X_test, y_test], axis = 1)
cust_pred = [predict(GNB_custom, row) for row in ds_test.values.tolist()]
cust_pred = np.array(cust_pred, dtype = 'int64')_____no_output_____cust_pred_____no_output_____
</code>
Now an instance of the `sklearn` _Gaussian Naive Bayes_ model is created and fit on the training data, and an array of predictions is obtained for our performance comparison._____no_output_____
<code>
##GET AND INSTANCE OF GAUSSIAN NAIVE BAYES MODEL
GNB_skln = GaussianNB()
GNB_skln.fit(X_train, y_train)
##CREATE SKLEARN PREDICTIONS ARRAY
sk_pred = GNB_skln.predict(X_test)_____no_output_____sk_pred_____no_output_____
</code>
Finally, a comparison of both models is performed through a _Classification Report_._____no_output_____
<code>
print("Sklearn:")
print(classification_report(y_test, sk_pred))
print("Custom:")
print(classification_report(y_test, cust_pred))Sklearn:
precision recall f1-score support
0 1.00 1.00 1.00 15
1 0.88 0.94 0.91 16
2 0.92 0.86 0.89 14
micro avg 0.93 0.93 0.93 45
macro avg 0.94 0.93 0.93 45
weighted avg 0.93 0.93 0.93 45
Custom:
precision recall f1-score support
0 1.00 1.00 1.00 15
1 0.88 0.94 0.91 16
2 0.92 0.86 0.89 14
micro avg 0.93 0.93 0.93 45
macro avg 0.94 0.93 0.93 45
weighted avg 0.93 0.93 0.93 45
</code>
| {
"repository": "acadena-repo/MACHINE-LEARNING-TOP-ALGORITHMS-SERIES",
"path": "ML SERIES - NAIVE BAYES/MACHINE LEARNING TOP ALGORITHMS - NAIVE BAYES CLASSIFIER.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 23119,
"hexsha": "d092d384f32019d2124226ae9f7f1fbb7c9d288a",
"max_line_length": 456,
"avg_line_length": 30.3001310616,
"alphanum_fraction": 0.4706950993
} |
# Notebook from gequitz/Issuing-Credit-Cards-and-SQL-Assignment
Path: Soccer_SQLite.ipynb
<code>
import pandas as pd
import sqlite3
conn = sqlite3.connect('database.sqlite')
query = "SELECT * FROM sqlite_master"
df_schema = pd.read_sql_query(query, conn)
df_schema.tbl_name.unique()_____no_output_____df_schema.head(20)_____no_output_____cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("Player_Attributes")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'player_fifa_api_id', 'INTEGER', 0, None, 0)
(2, 'player_api_id', 'INTEGER', 0, None, 0)
(3, 'date', 'TEXT', 0, None, 0)
(4, 'overall_rating', 'INTEGER', 0, None, 0)
(5, 'potential', 'INTEGER', 0, None, 0)
(6, 'preferred_foot', 'TEXT', 0, None, 0)
(7, 'attacking_work_rate', 'TEXT', 0, None, 0)
(8, 'defensive_work_rate', 'TEXT', 0, None, 0)
(9, 'crossing', 'INTEGER', 0, None, 0)
(10, 'finishing', 'INTEGER', 0, None, 0)
(11, 'heading_accuracy', 'INTEGER', 0, None, 0)
(12, 'short_passing', 'INTEGER', 0, None, 0)
(13, 'volleys', 'INTEGER', 0, None, 0)
(14, 'dribbling', 'INTEGER', 0, None, 0)
(15, 'curve', 'INTEGER', 0, None, 0)
(16, 'free_kick_accuracy', 'INTEGER', 0, None, 0)
(17, 'long_passing', 'INTEGER', 0, None, 0)
(18, 'ball_control', 'INTEGER', 0, None, 0)
(19, 'acceleration', 'INTEGER', 0, None, 0)
(20, 'sprint_speed', 'INTEGER', 0, None, 0)
(21, 'agility', 'INTEGER', 0, None, 0)
(22, 'reactions', 'INTEGER', 0, None, 0)
(23, 'balance', 'INTEGER', 0, None, 0)
(24, 'shot_power', 'INTEGER', 0, None, 0)
(25, 'jumping', 'INTEGER', 0, None, 0)
(26, 'stamina', 'INTEGER', 0, None, 0)
(27, 'strength', 'INTEGER', 0, None, 0)
(28, 'long_shots', 'INTEGER', 0, None, 0)
(29, 'aggression', 'INTEGER', 0, None, 0)
(30, 'interceptions', 'INTEGER', 0, None, 0)
(31, 'positioning', 'INTEGER', 0, None, 0)
(32, 'vision', 'INTEGER', 0, None, 0)
(33, 'penalties', 'INTEGER', 0, None, 0)
(34, 'marking', 'INTEGER', 0, None, 0)
(35, 'standing_tackle', 'INTEGER', 0, None, 0)
(36, 'sliding_tackle', 'INTEGER', 0, None, 0)
(37, 'gk_diving', 'INTEGER', 0, None, 0)
(38, 'gk_handling', 'INTEGER', 0, None, 0)
(39, 'gk_kicking', 'INTEGER', 0, None, 0)
(40, 'gk_positioning', 'INTEGER', 0, None, 0)
(41, 'gk_reflexes', 'INTEGER', 0, None, 0)
#df_schema.to_csv("soccer_schema.csv")_____no_output_____cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("Player")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'player_api_id', 'INTEGER', 0, None, 0)
(2, 'player_name', 'TEXT', 0, None, 0)
(3, 'player_fifa_api_id', 'INTEGER', 0, None, 0)
(4, 'birthday', 'TEXT', 0, None, 0)
(5, 'height', 'INTEGER', 0, None, 0)
(6, 'weight', 'INTEGER', 0, None, 0)
cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("Match")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'country_id', 'INTEGER', 0, None, 0)
(2, 'league_id', 'INTEGER', 0, None, 0)
(3, 'season', 'TEXT', 0, None, 0)
(4, 'stage', 'INTEGER', 0, None, 0)
(5, 'date', 'TEXT', 0, None, 0)
(6, 'match_api_id', 'INTEGER', 0, None, 0)
(7, 'home_team_api_id', 'INTEGER', 0, None, 0)
(8, 'away_team_api_id', 'INTEGER', 0, None, 0)
(9, 'home_team_goal', 'INTEGER', 0, None, 0)
(10, 'away_team_goal', 'INTEGER', 0, None, 0)
(11, 'home_player_X1', 'INTEGER', 0, None, 0)
(12, 'home_player_X2', 'INTEGER', 0, None, 0)
(13, 'home_player_X3', 'INTEGER', 0, None, 0)
(14, 'home_player_X4', 'INTEGER', 0, None, 0)
(15, 'home_player_X5', 'INTEGER', 0, None, 0)
(16, 'home_player_X6', 'INTEGER', 0, None, 0)
(17, 'home_player_X7', 'INTEGER', 0, None, 0)
(18, 'home_player_X8', 'INTEGER', 0, None, 0)
(19, 'home_player_X9', 'INTEGER', 0, None, 0)
(20, 'home_player_X10', 'INTEGER', 0, None, 0)
(21, 'home_player_X11', 'INTEGER', 0, None, 0)
(22, 'away_player_X1', 'INTEGER', 0, None, 0)
(23, 'away_player_X2', 'INTEGER', 0, None, 0)
(24, 'away_player_X3', 'INTEGER', 0, None, 0)
(25, 'away_player_X4', 'INTEGER', 0, None, 0)
(26, 'away_player_X5', 'INTEGER', 0, None, 0)
(27, 'away_player_X6', 'INTEGER', 0, None, 0)
(28, 'away_player_X7', 'INTEGER', 0, None, 0)
(29, 'away_player_X8', 'INTEGER', 0, None, 0)
(30, 'away_player_X9', 'INTEGER', 0, None, 0)
(31, 'away_player_X10', 'INTEGER', 0, None, 0)
(32, 'away_player_X11', 'INTEGER', 0, None, 0)
(33, 'home_player_Y1', 'INTEGER', 0, None, 0)
(34, 'home_player_Y2', 'INTEGER', 0, None, 0)
(35, 'home_player_Y3', 'INTEGER', 0, None, 0)
(36, 'home_player_Y4', 'INTEGER', 0, None, 0)
(37, 'home_player_Y5', 'INTEGER', 0, None, 0)
(38, 'home_player_Y6', 'INTEGER', 0, None, 0)
(39, 'home_player_Y7', 'INTEGER', 0, None, 0)
(40, 'home_player_Y8', 'INTEGER', 0, None, 0)
(41, 'home_player_Y9', 'INTEGER', 0, None, 0)
(42, 'home_player_Y10', 'INTEGER', 0, None, 0)
(43, 'home_player_Y11', 'INTEGER', 0, None, 0)
(44, 'away_player_Y1', 'INTEGER', 0, None, 0)
(45, 'away_player_Y2', 'INTEGER', 0, None, 0)
(46, 'away_player_Y3', 'INTEGER', 0, None, 0)
(47, 'away_player_Y4', 'INTEGER', 0, None, 0)
(48, 'away_player_Y5', 'INTEGER', 0, None, 0)
(49, 'away_player_Y6', 'INTEGER', 0, None, 0)
(50, 'away_player_Y7', 'INTEGER', 0, None, 0)
(51, 'away_player_Y8', 'INTEGER', 0, None, 0)
(52, 'away_player_Y9', 'INTEGER', 0, None, 0)
(53, 'away_player_Y10', 'INTEGER', 0, None, 0)
(54, 'away_player_Y11', 'INTEGER', 0, None, 0)
(55, 'home_player_1', 'INTEGER', 0, None, 0)
(56, 'home_player_2', 'INTEGER', 0, None, 0)
(57, 'home_player_3', 'INTEGER', 0, None, 0)
(58, 'home_player_4', 'INTEGER', 0, None, 0)
(59, 'home_player_5', 'INTEGER', 0, None, 0)
(60, 'home_player_6', 'INTEGER', 0, None, 0)
(61, 'home_player_7', 'INTEGER', 0, None, 0)
(62, 'home_player_8', 'INTEGER', 0, None, 0)
(63, 'home_player_9', 'INTEGER', 0, None, 0)
(64, 'home_player_10', 'INTEGER', 0, None, 0)
(65, 'home_player_11', 'INTEGER', 0, None, 0)
(66, 'away_player_1', 'INTEGER', 0, None, 0)
(67, 'away_player_2', 'INTEGER', 0, None, 0)
(68, 'away_player_3', 'INTEGER', 0, None, 0)
(69, 'away_player_4', 'INTEGER', 0, None, 0)
(70, 'away_player_5', 'INTEGER', 0, None, 0)
(71, 'away_player_6', 'INTEGER', 0, None, 0)
(72, 'away_player_7', 'INTEGER', 0, None, 0)
(73, 'away_player_8', 'INTEGER', 0, None, 0)
(74, 'away_player_9', 'INTEGER', 0, None, 0)
(75, 'away_player_10', 'INTEGER', 0, None, 0)
(76, 'away_player_11', 'INTEGER', 0, None, 0)
(77, 'goal', 'TEXT', 0, None, 0)
(78, 'shoton', 'TEXT', 0, None, 0)
(79, 'shotoff', 'TEXT', 0, None, 0)
(80, 'foulcommit', 'TEXT', 0, None, 0)
(81, 'card', 'TEXT', 0, None, 0)
(82, 'cross', 'TEXT', 0, None, 0)
(83, 'corner', 'TEXT', 0, None, 0)
(84, 'possession', 'TEXT', 0, None, 0)
(85, 'B365H', 'NUMERIC', 0, None, 0)
(86, 'B365D', 'NUMERIC', 0, None, 0)
(87, 'B365A', 'NUMERIC', 0, None, 0)
(88, 'BWH', 'NUMERIC', 0, None, 0)
(89, 'BWD', 'NUMERIC', 0, None, 0)
(90, 'BWA', 'NUMERIC', 0, None, 0)
(91, 'IWH', 'NUMERIC', 0, None, 0)
(92, 'IWD', 'NUMERIC', 0, None, 0)
(93, 'IWA', 'NUMERIC', 0, None, 0)
(94, 'LBH', 'NUMERIC', 0, None, 0)
(95, 'LBD', 'NUMERIC', 0, None, 0)
(96, 'LBA', 'NUMERIC', 0, None, 0)
(97, 'PSH', 'NUMERIC', 0, None, 0)
(98, 'PSD', 'NUMERIC', 0, None, 0)
(99, 'PSA', 'NUMERIC', 0, None, 0)
(100, 'WHH', 'NUMERIC', 0, None, 0)
(101, 'WHD', 'NUMERIC', 0, None, 0)
(102, 'WHA', 'NUMERIC', 0, None, 0)
(103, 'SJH', 'NUMERIC', 0, None, 0)
(104, 'SJD', 'NUMERIC', 0, None, 0)
(105, 'SJA', 'NUMERIC', 0, None, 0)
(106, 'VCH', 'NUMERIC', 0, None, 0)
(107, 'VCD', 'NUMERIC', 0, None, 0)
(108, 'VCA', 'NUMERIC', 0, None, 0)
(109, 'GBH', 'NUMERIC', 0, None, 0)
(110, 'GBD', 'NUMERIC', 0, None, 0)
(111, 'GBA', 'NUMERIC', 0, None, 0)
(112, 'BSH', 'NUMERIC', 0, None, 0)
(113, 'BSD', 'NUMERIC', 0, None, 0)
(114, 'BSA', 'NUMERIC', 0, None, 0)
cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("League")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'country_id', 'INTEGER', 0, None, 0)
(2, 'name', 'TEXT', 0, None, 0)
cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("Country")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'name', 'TEXT', 0, None, 0)
cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("Team")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'team_api_id', 'INTEGER', 0, None, 0)
(2, 'team_fifa_api_id', 'INTEGER', 0, None, 0)
(3, 'team_long_name', 'TEXT', 0, None, 0)
(4, 'team_short_name', 'TEXT', 0, None, 0)
cur = conn.cursor()
cur.execute('''SELECT * from pragma_table_info("Team_Attributes")''' )
rows = cur.fetchall()
for row in rows[:]:
print(row)(0, 'id', 'INTEGER', 0, None, 1)
(1, 'team_fifa_api_id', 'INTEGER', 0, None, 0)
(2, 'team_api_id', 'INTEGER', 0, None, 0)
(3, 'date', 'TEXT', 0, None, 0)
(4, 'buildUpPlaySpeed', 'INTEGER', 0, None, 0)
(5, 'buildUpPlaySpeedClass', 'TEXT', 0, None, 0)
(6, 'buildUpPlayDribbling', 'INTEGER', 0, None, 0)
(7, 'buildUpPlayDribblingClass', 'TEXT', 0, None, 0)
(8, 'buildUpPlayPassing', 'INTEGER', 0, None, 0)
(9, 'buildUpPlayPassingClass', 'TEXT', 0, None, 0)
(10, 'buildUpPlayPositioningClass', 'TEXT', 0, None, 0)
(11, 'chanceCreationPassing', 'INTEGER', 0, None, 0)
(12, 'chanceCreationPassingClass', 'TEXT', 0, None, 0)
(13, 'chanceCreationCrossing', 'INTEGER', 0, None, 0)
(14, 'chanceCreationCrossingClass', 'TEXT', 0, None, 0)
(15, 'chanceCreationShooting', 'INTEGER', 0, None, 0)
(16, 'chanceCreationShootingClass', 'TEXT', 0, None, 0)
(17, 'chanceCreationPositioningClass', 'TEXT', 0, None, 0)
(18, 'defencePressure', 'INTEGER', 0, None, 0)
(19, 'defencePressureClass', 'TEXT', 0, None, 0)
(20, 'defenceAggression', 'INTEGER', 0, None, 0)
(21, 'defenceAggressionClass', 'TEXT', 0, None, 0)
(22, 'defenceTeamWidth', 'INTEGER', 0, None, 0)
(23, 'defenceTeamWidthClass', 'TEXT', 0, None, 0)
(24, 'defenceDefenderLineClass', 'TEXT', 0, None, 0)
</code>
## Question 1: Which team scored the most points when playing at home?_____no_output_____
<code>
cur = conn.cursor()
cur.execute("SELECT sum(a.home_team_goal) as sum_goals, b.team_api_id, b.team_long_name FROM match a, team b WHERE a.home_team_api_id = b.team_api_id group by b.team_api_id order by sum_goals desc limit 1" )
rows = cur.fetchall()
for row in rows[:]:
print(row)
(505, 8633, 'Real Madrid CF')
</code>
## Question 2: Did this team also score the most points when playing away?_____no_output_____
<code>
cur = conn.cursor()
cur.execute("SELECT sum(a.away_team_goal) as sum_goals, b.team_api_id, b.team_long_name FROM match a, team b WHERE a.away_team_api_id = b.team_api_id group by b.team_api_id order by sum_goals desc limit 1" )
rows = cur.fetchall()
for row in rows[:]:
print(row)(354, 8634, 'FC Barcelona')
</code>
## Question 3: How many matches resulted in a tie?_____no_output_____
<code>
cur = conn.cursor()
cur.execute("SELECT count(match_api_id) FROM match where home_team_goal = away_team_goal" )
rows = cur.fetchall()
for row in rows[:]:
print(row)(6596,)
</code>
## Question 4: How many players have Smith for their last name? How many have 'smith' anywhere in their name?_____no_output_____
<code>
cur = conn.cursor()
cur.execute("SELECT COUNT(player_name) FROM Player where player_name LIKE '% smith' " )
rows = cur.fetchall()
for row in rows[:]:
print(row)
(15,)
cur = conn.cursor()
cur.execute("SELECT COUNT(player_name) FROM Player where player_name LIKE '%smith%' " )
rows = cur.fetchall()
for row in rows[:]:
print(row)(18,)
</code>
## Question 5: What was the median tie score? Use the value determined in the previous question for the number of tie games. Hint: PostgreSQL does not have a median function. Instead, think about the steps required to calculate a median and use the WITH command to store stepwise results as a table and then operate on these results._____no_output_____
<code>
cur = conn.cursor()
#cur.execute("WITH goal_list AS (SELECT home_team_goal FROM match where home_team_goal = away_team_goal order \
# by home_team_goal desc) select home_team_goal from goal_list limit 1 offset 6596/2" )
cur.execute("WITH goal_list AS (SELECT home_team_goal FROM match where home_team_goal = away_team_goal order \
by home_team_goal desc) select home_team_goal from goal_list limit 1 offset (select count(*) from goal_list)/2" )
rows = cur.fetchall()
for row in rows[:20]:
print(row)
(1,)
</code>
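One aside on the query above: with an even number of tied games, taking a single row at offset count/2 picks one of the two middle values rather than averaging them. A hedged sketch of a variant that averages the two middle rows (reusing the same connection) is shown below.
<code>
cur = conn.cursor()
cur.execute("""WITH goal_list AS (SELECT home_team_goal FROM match
                WHERE home_team_goal = away_team_goal ORDER BY home_team_goal)
               SELECT AVG(home_team_goal) FROM
                 (SELECT home_team_goal FROM goal_list
                  LIMIT 2 - (SELECT COUNT(*) FROM goal_list) % 2
                  OFFSET (SELECT (COUNT(*) - 1) / 2 FROM goal_list))""")
print(cur.fetchone())
</code>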
## Question 6: What percentage of players prefer their left or right foot? Hint: Calculate either the right or left foot, whichever is easier based on how you setup the problem._____no_output_____
<code>
cur = conn.cursor()
cur.execute("SELECT (COUNT(DISTINCT(player_api_id)) * 100.0 / (SELECT COUNT(DISTINCT(player_api_id)) FROM Player_Attributes)) \
FROM Player_Attributes WHERE preferred_foot LIKE '%right%' " )
rows = cur.fetchall()
for row in rows[:20]:
print(row)
#SELECT (COUNT(DISTINCT(player_api_id)) * 100.0 / (SELECT COUNT(DISTINCT(player_api_id)) FROM Player_Attributes)) as percentage
#FROM Player_Attributes
#WHERE preferred_foot LIKE '%left%'
(81.18444846292948,)
</code>
| {
"repository": "gequitz/Issuing-Credit-Cards-and-SQL-Assignment",
"path": "Soccer_SQLite.ipynb",
"matched_keywords": [
"bwa"
],
"stars": null,
"size": 27910,
"hexsha": "d09403f15f8d5084937d53a3ac45fa7b0737dfab",
"max_line_length": 339,
"avg_line_length": 33.789346247,
"alphanum_fraction": 0.4601218201
} |
# Notebook from klahrich/mlmachine
Path: notebooks/mlmachine_part_2.ipynb
__mlmachine - GroupbyImputer, KFoldEncoder, and Skew Correction__
<br><br>
Welcome to Example Notebook 2. If you're new to mlmachine, check out [Example Notebook 1](https://github.com/petersontylerd/mlmachine/blob/master/notebooks/mlmachine_part_1.ipynb).
<br><br>
Check out the [GitHub repository](https://github.com/petersontylerd/mlmachine).
<br><br>
1. [Missing Values - Assessment & GroupbyImputer](#Missing-Values-Assessment-&-GroupbyImputer)
1. [Assessment](#Assessment)
1. [GroupbyImputer](#GroupbyImputer)
1. [Imputation](#Imputation)
1. [KFold Encoding - Exotic Encoding Without the Leakage](#KFold-Encoding-Exotic-Encoding-Without-the-Leakage)
1. [KFoldEncoder](#KFoldEncoder)
1. [Box, Cox, Yeo & Johnson - Skew Correctors](#Box,-Cox,-Yeo-&-Johnson-Skew-Correctors)
1. [Assessment](#Assessment-1)
1. [Skew correction](#Skew-correction)_____no_output_____---
# Missing Values - Assessment & GroupbyImputer
---
<br><br>
Let's start by instantiating a couple `Machine()` objects, one for our training data and a second for our validation data:
<br><br>_____no_output_____<a id = 'Missing-Values-Assessment-&-GroupbyImputer'></a>_____no_output_____
<code>
# import libraries
import numpy as np
import pandas as pd
# import mlmachine tools
import mlmachine as mlm
from mlmachine.data import titanic
# use titanic() function to create DataFrames for training and validation datasets
df_train, df_valid = titanic()
# ordinal encoding hierarchy
ordinal_encodings = {"Pclass": [1, 2, 3]}
# instantiate a Machine object for the training data
mlmachine_titanic_train = mlm.Machine(
data=df_train,
target="Survived",
remove_features=["PassengerId","Ticket","Name"],
identify_as_continuous=["Age","Fare"],
identify_as_count=["Parch","SibSp"],
identify_as_nominal=["Embarked"],
identify_as_ordinal=["Pclass"],
ordinal_encodings=ordinal_encodings,
is_classification=True,
)
# instantiate a Machine object for the validation data
mlmachine_titanic_valid = mlm.Machine(
data=df_valid,
remove_features=["PassengerId","Ticket","Name"],
identify_as_continuous=["Age","Fare"],
identify_as_count=["Parch","SibSp"],
identify_as_nominal=["Embarked"],
identify_as_ordinal=["Pclass"],
ordinal_encodings=ordinal_encodings,
is_classification=True,
)
~/.pyenv/versions/main37/lib/python3.7/site-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.
warnings.warn(msg, category=FutureWarning)
</code>
---
## Assessment
---
<br><br>
Each `Machine()` object contains a method for summarizing missingness in tabular form and in graphical form:
<br><br>_____no_output_____<a id = 'Assessment'></a>_____no_output_____
<code>
# generate missingness summary for training data
mlmachine_titanic_train.eda_missing_summary(display_df=True)_____no_output_____
</code>
---
<br><br>
By default, this method acts on the `data` attribute associated with `mlmachine_train`. Let's do the same for the validation dataset:
<br><br>_____no_output_____
<code>
# generate missingness summary for validation data
mlmachine_titanic_valid.eda_missing_summary(display_df=True)_____no_output_____
</code>
---
<br><br>
Next, we need to determine if there are features with missing values in the training data, but not the validation data, and vice versa. This informs how we should set up our transformation pipeline. For example, if a feature has missing values in the validation dataset, but not the training dataset, we will still want to `fit_transform()` this feature on the training data to learn imputation values to apply on the nulls in the validation dataset.
<br><br>
We could eyeball the tables and visuals above to compare the state of missingness in the two datasets, but this can be tedious, particularly with large datasets. Instead, we will leverage a method within our `Machine()` object. We simply pass the validation dataset to `mlmachine_titanic_train`'s method `missing_column_compare`, which returns a bidirectional missingness summary.
<br><br>_____no_output_____
<code>
# generate missingness comparison summary
mlmachine_titanic_train.missing_column_compare(
validation_data=mlmachine_titanic_valid.data,
)Feature has missing values in validation data, not training data.
{'Fare'}
Feature has missing values in training data, not validation data.
{'Embarked'}
</code>
---
<br><br>
The key observation here is that "Fare" is fully populated in the training data, but not the validation data. We need to make sure our pipeline learns how to impute these missing values based on the training data, despite the fact that the training data is not missing any values in this feature.
<br><br>_____no_output_____---
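For reference, a minimal plain-pandas sketch of the same bidirectional check (an illustration, not mlmachine's internals): it simply compares the sets of columns that contain nulls in each partition.
<code>
# Illustrative only: compare which columns contain nulls in each partition
train_nulls = set(mlmachine_titanic_train.data.columns[mlmachine_titanic_train.data.isnull().any()])
valid_nulls = set(mlmachine_titanic_valid.data.columns[mlmachine_titanic_valid.data.isnull().any()])
print("Missing in validation only:", valid_nulls - train_nulls)
print("Missing in training only:", train_nulls - valid_nulls)
</code>
---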
## GroupbyImputer
---
<br><br>
mlmachine includes a transformer called `GroupbyImputer()`, which makes it easy to perform the same basic imputation techniques provided by Scikit-learn's `SimpleImputer()`, but with the added ability to group by another feature in the dataset. Let's see an example:
<br><br>_____no_output_____<a id = 'GroupbyImputer'></a>_____no_output_____
<code>
# import mlmachine tools
from mlmachine.features.preprocessing import GroupbyImputer
# instantiate GroupbyImputer to fill "Age" mean, grouped by "SibSp"
impute = GroupbyImputer(null_column="Age", groupby_column="SibSp", strategy="mean")
impute.fit_transform(mlmachine_titanic_train.data[["Age","SibSp"]])
display(impute.train_value)_____no_output_____
</code>
---
<br><br>
In the code snippet above, we mean impute "Age", grouped by "SibSp". We pass "Age" to the `null_column` parameter to indicate which column contains the nulls, and pass "SibSp" to the `groupby_column` parameter. The strategy parameter receives the same instructions as Scikit-learn's `SimpleImputer()` - "mean", "median" and "most_frequent".
<br><br>
To inspect the learned values, we can display the object's `train_value` attribute, which is a `DataFrame` containing the category/value pairs.
<br><br>
`GroupbyImputer` uses these pairs to impute the missing values in "Age". If, in the unlikely circumstance, a level in `groupby_column` has only null values in `null_column`, then the missing values associated with that level will be imputed with the mean, median or mode of the entire feature.
<br><br>_____no_output_____---
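For intuition, the same grouped mean imputation can be sketched in plain pandas with a groupby-transform fill (a minimal sketch; it does not reproduce GroupbyImputer's fallback for all-null groups or its fit/transform separation).
<code>
# Illustrative only: fill missing "Age" with the mean "Age" of the matching "SibSp" group
age_by_sibsp = mlmachine_titanic_train.data.groupby("SibSp")["Age"].transform("mean")
filled_age = mlmachine_titanic_train.data["Age"].fillna(age_by_sibsp)
</code>
---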
## Imputation
---
<br><br>
Now we're going to use `GroupbyImputer()` within `PandasFeatureUnion()` to impute nulls in both the training and validation datasets.
<br><br>_____no_output_____<a id = 'Imputation'></a>_____no_output_____
<code>
# import libraries
from sklearn.impute import SimpleImputer
from sklearn.pipeline import make_pipeline
# import mlmachine tools
from mlmachine.features.preprocessing import (
DataFrameSelector,
PandasTransformer,
PandasFeatureUnion,
)
# create imputation PandasFeatureUnion pipeline
impute_pipe = PandasFeatureUnion([
("age", make_pipeline(
DataFrameSelector(include_columns=["Age","SibSp"]),
GroupbyImputer(null_column="Age", groupby_column="SibSp", strategy="mean")
)),
("fare", make_pipeline(
DataFrameSelector(include_columns=["Fare","Pclass"]),
GroupbyImputer(null_column="Fare", groupby_column="Pclass", strategy="mean")
)),
("embarked", make_pipeline(
DataFrameSelector(include_columns=["Embarked"]),
PandasTransformer(SimpleImputer(strategy="most_frequent"))
)),
("cabin", make_pipeline(
DataFrameSelector(include_columns=["Cabin"]),
PandasTransformer(SimpleImputer(strategy="constant", fill_value="X"))
)),
("diff", make_pipeline(
DataFrameSelector(exclude_columns=["Age","Fare","Embarked","Cabin"])
)),
])
# fit and transform training data, transform validation data
mlmachine_titanic_train.data = impute_pipe.fit_transform(mlmachine_titanic_train.data)
mlmachine_titanic_valid.data = impute_pipe.transform(mlmachine_titanic_valid.data)_____no_output_____mlmachine_titanic_train.data[:20]_____no_output_____
</code>
---
<br><br>
`GroupbyImputer()` makes two appearances in this `PandasFeatureUnion()` operation. On line 4, we groupby the feature "SibSp" to impute the mean "Age" value, and on line 8 we groupby the feature "Pclass" to impute the mean "Fare" value.
<br><br>
Imputations for "Embarked" and "Cabin" are completed in straightforward fashion - "Embarked" is simply imputed with the mode, and "Cabin" is imputed with the constant value of "X".
<br><br>
Lastly, we `fit_transform()` the `PandasFeatureUnion()` on `mlmachine_titanic_train.data` and finish filling our nulls by calling `transform()` on `mlmachine_titanic_valid.data`.
<br><br>_____no_output_____---
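A quick sanity check after running the union (a sketch that assumes the imputed columns keep their original names):
<code>
# Illustrative only: the imputed features should no longer contain missing values
for name, mlm_obj in [("train", mlmachine_titanic_train), ("valid", mlmachine_titanic_valid)]:
    remaining = mlm_obj.data[["Age", "Fare", "Embarked", "Cabin"]].isnull().sum().sum()
    print(name, "nulls remaining:", remaining)
</code>
---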
# KFold Encoding - Exotic Encoding Without the Leakage
---
<br><br>
Target value-based encoding techniques such as mean encoding, CatBoost Encoding, and Weight of Evidence encoding are often discussed in the context of Kaggle competitions. The primary advantage of these techniques is that they use the target variable to inform the encoded feature's values. However, this comes with the risk of leaking target information into the encoded values.
<br><br>
KFold cross-validation assists in avoiding this problem. The key is to apply the encoded values to the out-of-fold observations only. This visualization illustrates the general pattern:
<br><br>
<br><br>

<br><br>
- Separate a validation subset from the training dataset.
- Learn the encoded values from the training data and the associated target values.
- Apply the learned values to the validation observations only.
- Repeat the process on the K-1 remaining folds._____no_output_____<a id = 'KFold-Encoding-Exotic-Encoding-Without-the-Leakage'></a>_____no_output_____---
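A minimal sketch of this out-of-fold pattern in plain pandas with scikit-learn's KFold (illustrative only, not mlmachine's `KFoldEncoder`; the function name and the fallback to the global mean are assumptions made for the example):
<code>
import numpy as np
import pandas as pd
from sklearn.model_selection import KFold

def kfold_mean_encode(X, y, column, n_splits=5, seed=0):
    """Out-of-fold mean encoding of a single categorical column (illustrative)."""
    encoded = pd.Series(np.nan, index=X.index)
    for fit_idx, apply_idx in KFold(n_splits=n_splits, shuffle=True, random_state=seed).split(X):
        # learn the category -> target-mean mapping on the in-fold rows only
        means = y.iloc[fit_idx].groupby(X[column].iloc[fit_idx]).mean()
        # apply the learned values to the held-out rows only
        encoded.iloc[apply_idx] = X[column].iloc[apply_idx].map(means).values
    return encoded.fillna(y.mean())  # unseen categories fall back to the global mean
</code>
---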
## KFoldEncoder
---
<br><br>
mlmachine has a class called `KFoldEncoder` that facilitates KFold encoding with an encoder of choice. Let's use a small subset of our features to see how this works.
<br><br>
We want to target encode two features: "Pclass" and "Age". Since "Age" is a continuous feature, we first need to map the values to bins, which is effectively an ordinal categorical column. We handle all of this in the simple `PandasFeatureUnion` below:
<br><br>_____no_output_____<a id = 'KFoldEncoder'></a>_____no_output_____
<code>
# import libraries
from sklearn.preprocessing import KBinsDiscretizer
# create simple encoding PandasFeatureUnion pipeline
encode_pipe = PandasFeatureUnion([
("bin", make_pipeline(
DataFrameSelector(include_columns=["Age"]),
PandasTransformer(KBinsDiscretizer(encode="ordinal"))
)),
("select", make_pipeline(
DataFrameSelector(include_columns=["Age","Pclass"])
)),
])
# fit and transform training data, transform validation data
mlmachine_titanic_train.data = encode_pipe.fit_transform(mlmachine_titanic_train.data)
mlmachine_titanic_valid.data = encode_pipe.transform(mlmachine_titanic_valid.data)
# update mlm_dtypes
mlmachine_titanic_train.update_dtypes()
mlmachine_titanic_valid.update_dtypes()_____no_output_____
</code>
---
<br><br>
This operation returns a binned version of "Age", as well as the original "Age" and "Pclass" features.
<br><br>_____no_output_____
<code>
mlmachine_titanic_train.data[:10]_____no_output_____
</code>
---
<br><br>
Next, we target encode both "Pclass" and "Age_binned_5" using mean encoding, CatBoost encoding and Weight of Evidence encoding as provided by the package category_encoders.
<br><br>_____no_output_____
<code>
# import libraries
from sklearn.model_selection import KFold
from category_encoders import WOEEncoder, TargetEncoder, CatBoostEncoder
# import mlmachine tools
from mlmachine.features.preprocessing import KFoldEncoder
# create KFold encoding PandasFeatureUnion pipeline
target_encode_pipe = PandasFeatureUnion([
("target", make_pipeline(
DataFrameSelector(include_mlm_dtypes=["category"], exclude_columns=["Cabin"]),
KFoldEncoder(
target=mlmachine_titanic_train.target,
cv=KFold(n_splits=5, shuffle=True, random_state=0),
encoder=TargetEncoder,
),
)),
("woe", make_pipeline(
DataFrameSelector(include_mlm_dtypes=["category"]),
KFoldEncoder(
target=mlmachine_titanic_train.target,
cv=KFold(n_splits=5, shuffle=False),
encoder=WOEEncoder,
),
)),
("catboost", make_pipeline(
DataFrameSelector(include_mlm_dtypes=["category"]),
KFoldEncoder(
target=mlmachine_titanic_train.target,
cv=KFold(n_splits=5, shuffle=False),
encoder=CatBoostEncoder,
),
)),
("diff", make_pipeline(
DataFrameSelector(exclude_mlm_dtypes=["category"]),
)),
])
# fit and transform training data, transform validation data
mlmachine_titanic_train.data = target_encode_pipe.fit_transform(mlmachine_titanic_train.data)
mlmachine_titanic_valid.data = target_encode_pipe.transform(mlmachine_titanic_valid.data)
# update mlm_dtypes
mlmachine_titanic_train.update_dtypes()
mlmachine_titanic_valid.update_dtypes()_____no_output_____mlmachine_titanic_train.data[:10]_____no_output_____
</code>
---
<br><br>
Let's review the key `KFoldEncoder()` parameters:
- `target`: the target attribute of our mlmachine_titanic_train object
- `cv`: a cross-validation object
- `encoder`: a target encoder class
<br><br>
`KFoldEncoder()` learns the encoded values on the training data, and applies the values to the out-of-fold observations.
<br><br>
On the validation data, the process is simpler: we calculate the average out-of-fold encodings applied to the training data and apply these values to all validation observations.
<br><br>_____no_output_____---
# Box, Cox, Yeo & Johnson - Skew Correctors
---_____no_output_____<a id = 'Box,-Cox,-Yeo-&-Johnson-Skew-Correctors'></a>_____no_output_____---
## Assessment
---
<br><br>
Just as we have a quick method for evaluating missingness, we have a quick method for evaluating skew.
<br><br>_____no_output_____<a id = 'Assessment-1'></a>_____no_output_____
<code>
# generate skewness summary
mlmachine_titanic_train.skew_summary()_____no_output_____
</code>
---
<br><br>
The `skew_summary()` method returns a `DataFrame` that summarizes the skew for each feature, along with a "Percent zero" column, which informs us of the percentage of values in the feature that are zero.
<br><br>_____no_output_____---
## Skew correction
---
<br><br>
mlmachine contains a class called `DualTransformer()`, which, by default, applies both Yeo-Johnson and Box-Cox transformations to the specified features with the intent of correcting skew. The Box-Cox transformation automatically seeks the lambda value which maximizes the log-likelihood function.
<br><br>
Since Box-Cox transformation requires all values in a feature to be greater than zero, `DualTransformer()` applies one of two simple feature adjustments when this rule is violated:
<br><br>
- If the minimum value in a feature is zero, each value in that feature is increased by a value of 1 prior to transformation.
- If the minimum value is less than zero, then each feature value is increased by the absolute value of the minimum value in the feature plus 1 prior to transformation.
<br><br>
Let's use `DualTransformer()` to see if we can minimize the skew in the original "Age" feature:
<br><br>_____no_output_____<a id = 'Skew-correction'></a>_____no_output_____
<code>
# import mlmachine tools
from mlmachine.features.preprocessing import DualTransformer
# create skew correction PandasFeatureUnion pipeline
skew_pipe = PandasFeatureUnion([
("skew", make_pipeline(
DataFrameSelector(include_columns=["Age"]),
DualTransformer(),
)),
])
# fit and transform training data, transform validation data
mlmachine_titanic_train.data = skew_pipe.fit_transform(mlmachine_titanic_train.data)
mlmachine_titanic_valid.data = skew_pipe.transform(mlmachine_titanic_valid.data)
# update mlm_dtypes
mlmachine_titanic_train.update_dtypes()
mlmachine_titanic_valid.update_dtypes()_____no_output_____mlmachine_titanic_train.data[:10]_____no_output_____
</code>
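For reference, the shift-then-transform adjustment described above can be sketched with scipy (an illustration of the idea, not DualTransformer's actual internals):
<code>
import numpy as np
from scipy import stats

def shifted_boxcox(values):
    """Shift a feature to be strictly positive, then Box-Cox transform it (illustrative)."""
    x = np.asarray(values, dtype=float)
    if x.min() <= 0:
        x = x + abs(x.min()) + 1  # +1 when the minimum is zero, +|min|+1 when it is negative
    transformed, lmbda = stats.boxcox(x)  # lambda chosen by maximizing the log-likelihood
    return transformed, lmbda
</code>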
---
<br><br>
`DualTransformer()` adds the features "Age_BoxCox" and "Age_YeoJohnson". Let's execute `skew_summary()` again to see if `DualTransformer()` addressed the skew in our original feature:
<br><br>
"Age_BoxCox" and "Age_YeoJohnson" have a skew of 0.0286 and 0.0483, respectively.
<br><br>_____no_output_____
<code>
# generate skewness summary
mlmachine_titanic_train.skew_summary()_____no_output_____
</code>
---
<br><br>
Star the [GitHub repository](https://github.com/petersontylerd/mlmachine), and stay tuned for additional notebooks.
<br><br>_____no_output_____
| {
"repository": "klahrich/mlmachine",
"path": "notebooks/mlmachine_part_2.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 140206,
"hexsha": "d095771fc43850951f1356e547fde79233548604",
"max_line_length": 42016,
"avg_line_length": 75.9101245263,
"alphanum_fraction": 0.7366731809
} |
# Notebook from aaryapatel007/Patient-Selection-for-Diabetes-Drug-Testing
Path: code/student_project.ipynb
# Overview_____no_output_____1. Project Instructions & Prerequisites
2. Learning Objectives
3. Data Preparation
4. Create Categorical Features with TF Feature Columns
5. Create Continuous/Numerical Features with TF Feature Columns
6. Build Deep Learning Regression Model with Sequential API and TF Probability Layers
7. Evaluating Potential Model Biases with Aequitas Toolkit
_____no_output_____# 1. Project Instructions & Prerequisites_____no_output_____## Project Instructions_____no_output_____**Context**: EHR data is becoming a key source of real-world evidence (RWE) for the pharmaceutical industry and regulators to [make decisions on clinical trials](https://www.fda.gov/news-events/speeches-fda-officials/breaking-down-barriers-between-clinical-trials-and-clinical-care-incorporating-real-world-evidence). You are a data scientist for an exciting unicorn healthcare startup that has created a groundbreaking diabetes drug that is ready for clinical trial testing. It is a very unique and sensitive drug that requires administering the drug over at least 5-7 days of time in the hospital with frequent monitoring/testing and patient medication adherence training with a mobile application. You have been provided a patient dataset from a client partner and are tasked with building a predictive model that can identify which type of patients the company should focus their efforts testing this drug on. Target patients are people that are likely to be in the hospital for this duration of time and will not incur significant additional costs for administering this drug to the patient and monitoring.
In order to achieve your goal you must build a regression model that can predict the estimated hospitalization time for a patient and use this to select/filter patients for your study.
_____no_output_____**Expected Hospitalization Time Regression Model:** Utilizing a synthetic dataset(denormalized at the line level augmentation) built off of the UCI Diabetes readmission dataset, students will build a regression model that predicts the expected days of hospitalization time and then convert this to a binary prediction of whether to include or exclude that patient from the clinical trial.
This project will demonstrate the importance of building the right data representation at the encounter level, with appropriate filtering and preprocessing/feature engineering of key medical code sets. This project will also require students to analyze and interpret their model for biases across key demographic groups.
Please see the project rubric online for more details on the areas your project will be evaluated._____no_output_____### Dataset_____no_output_____Due to healthcare PHI regulations (HIPAA, HITECH), there are limited number of publicly available datasets and some datasets require training and approval. So, for the purpose of this exercise, we are using a dataset from UC Irvine(https://archive.ics.uci.edu/ml/datasets/Diabetes+130-US+hospitals+for+years+1999-2008) that has been modified for this course. Please note that it is limited in its representation of some key features such as diagnosis codes which are usually an unordered list in 835s/837s (the HL7 standard interchange formats used for claims and remits)._____no_output_____**Data Schema**
The dataset reference information can be found at https://github.com/udacity/nd320-c1-emr-data-starter/blob/master/project/data_schema_references/
. There are two CSVs that provide more details on the fields and some of the mapped values._____no_output_____## Project Submission _____no_output_____When submitting this project, make sure to run all the cells before saving the notebook. Save the notebook file as "student_project_submission.ipynb" and save another copy as an HTML file by clicking "File" -> "Download as.."->"html". Include the "utils.py" and "student_utils.py" files in your submission. The student_utils.py should be where you put most of your code that you write and the summary and text explanations should be written inline in the notebook. Once you download these files, compress them into one zip file for submission._____no_output_____## Prerequisites _____no_output_____- Intermediate level knowledge of Python
- Basic knowledge of probability and statistics
- Basic knowledge of machine learning concepts
- Installation of Tensorflow 2.0 and other dependencies(conda environment.yml or virtualenv requirements.txt file provided)_____no_output_____## Environment Setup_____no_output_____For step by step instructions on creating your environment, please go to https://github.com/udacity/nd320-c1-emr-data-starter/blob/master/project/README.md._____no_output_____# 2. Learning Objectives_____no_output_____By the end of the project, you will be able to
- Use the Tensorflow Dataset API to scalably extract, transform, and load datasets and build datasets aggregated at the line, encounter, and patient data levels(longitudinal)
- Analyze EHR datasets to check for common issues (data leakage, statistical properties, missing values, high cardinality) by performing exploratory data analysis.
- Create categorical features from Key Industry Code Sets (ICD, CPT, NDC) and reduce dimensionality for high cardinality features by using embeddings
- Create derived features(bucketing, cross-features, embeddings) utilizing Tensorflow feature columns on both continuous and categorical input features
- SWBAT use the Tensorflow Probability library to train a model that provides uncertainty range predictions that allow for risk adjustment/prioritization and triaging of predictions
- Analyze and determine biases for a model for key demographic groups by evaluating performance metrics across groups by using the Aequitas framework
_____no_output_____# 3. Data Preparation_____no_output_____
<code>
# from __future__ import absolute_import, division, print_function, unicode_literals
import os
import numpy as np
import seaborn as sns
import tensorflow as tf
from tensorflow.keras import layers
import tensorflow_probability as tfp
import matplotlib.pyplot as plt
import pandas as pd
import aequitas as ae
from sklearn.metrics import roc_auc_score, accuracy_score, f1_score, classification_report, precision_score, recall_score
# Put all of the helper functions in utils
from utils import build_vocab_files, show_group_stats_viz, aggregate_dataset, preprocess_df, df_to_dataset, posterior_mean_field, prior_trainable
from functools import partial
pd.set_option('display.max_columns', 500)
# this allows you to make changes and save in student_utils.py and the file is reloaded every time you run a code block
%load_ext autoreload
%autoreload_____no_output_____#OPEN ISSUE ON MAC OSX for TF model training
import os
os.environ['KMP_DUPLICATE_LIB_OK']='True'_____no_output_____
</code>
## Dataset Loading and Schema Review_____no_output_____Load the dataset and view a sample of the dataset along with reviewing the schema reference files to gain a deeper understanding of the dataset. The dataset is located at the following path https://github.com/udacity/nd320-c1-emr-data-starter/blob/master/project/starter_code/data/final_project_dataset.csv. Also, review the information found in the data schema https://github.com/udacity/nd320-c1-emr-data-starter/blob/master/project/data_schema_references/_____no_output_____
<code>
dataset_path = "./data/final_project_dataset.csv"
df = pd.read_csv(dataset_path)_____no_output_____# Line Test
try:
assert len(df) > df['encounter_id'].nunique()
print("Dataset could be at the line level")
except:
print("Dataset is not at the line level")Dataset could be at the line level
</code>
## Determine Level of Dataset (Line or Encounter)_____no_output_____**Question 1**: Based off of analysis of the data, what level is this dataset? Is it at the line or encounter level? Are there any key fields besides the encounter_id and patient_nbr fields that we should use to aggregate on? Knowing this information will help inform us what level of aggregation is necessary for future steps and is a step that is often overlooked. _____no_output_____**Student Response** : The dataset is at line level and needs to be converted to encounter level. The dataset should be aggregated on encounter_id, patient_nbr and principal_diagnosis_code._____no_output_____## Analyze Dataset_____no_output_____**Question 2**: Utilizing the library of your choice (recommend Pandas and Seaborn or matplotlib though), perform exploratory data analysis on the dataset. In particular be sure to address the following questions:
- a. Field(s) with high amount of missing/zero values
- b. Based off the frequency histogram for each numerical field, which numerical field(s) has/have a Gaussian(normal) distribution shape?
- c. Which field(s) have high cardinality and why (HINT: ndc_code is one feature)
- d. Please describe the demographic distributions in the dataset for the age and gender fields.
_____no_output_____**OPTIONAL**: Use the Tensorflow Data Validation and Analysis library to complete.
- The Tensorflow Data Validation and Analysis library(https://www.tensorflow.org/tfx/data_validation/get_started) is a useful tool for analyzing and summarizing dataset statistics. It is especially useful because it can scale to large datasets that do not fit into memory.
- Note that there are some bugs that are still being resolved with Chrome v80 and we have moved away from using this for the project. _____no_output_____**Student Response**:
1. Fields with high amount of missing/null values are:
*weight, payer_code, medical_specialty, number_outpatient, number_inpatient, number_emergency, num_procedures, ndc_code.*
1. Numerical fields with a roughly Gaussian distribution: *num_lab_procedures, num_medications.*
1. Fields with high cardinality: *encounter_id, patient_nbr, other_diagnosis_codes.* This is because there are 71,518 patients and more than 100,000 encounters in the dataset, and each encounter carries various diagnosis codes. This can also be reviewed in the Tensorflow Data Validation statistics.
1. The demographic distributions are shown below._____no_output_____
<code>
def check_null_df(df):
return pd.DataFrame({
'percent_null' : df.isna().sum() / len(df) * 100,
'percent_zero' : df.isin([0]).sum() / len(df) * 100,
'percent_missing' : df.isin(['?', '?|?', 'Unknown/Invalid']).sum() / len(df) * 100,
})
check_null_df(df)_____no_output_____plt.figure(figsize=(8, 5))
sns.countplot(x = 'age', data = df)_____no_output_____plt.figure(figsize=(8, 5))
sns.countplot(x = 'gender', data = df)_____no_output_____plt.figure(figsize=(8, 5))
sns.countplot(x = 'age', hue = 'gender', data = df)_____no_output_____plt.figure(figsize=(8, 5))
sns.distplot(df['num_lab_procedures'])_____no_output_____plt.figure(figsize=(8, 5))
sns.distplot(df['num_medications'])_____no_output_____######NOTE: The visualization will only display in Chrome browser. ########
# First install below libraries and then restart the kernel to visualize.
# !pip install tensorflow-data-validation
# !pip install apache-beam[interactive]
import tensorflow_data_validation as tfdv
full_data_stats = tfdv.generate_statistics_from_dataframe(dataframe=df) /opt/conda/lib/python3.7/site-packages/tensorflow_data_validation/arrow/arrow_util.py:236: FutureWarning: Calling .data on ChunkedArray is provided for compatibility after Column was removed, simply drop this attribute
types.FeaturePath([column_name]), column.data.chunk(0), weights):
tfdv.visualize_statistics(full_data_stats)_____no_output_____schema = tfdv.infer_schema(statistics=full_data_stats)
tfdv.display_schema(schema=schema)_____no_output_____categorical_columns_list = ['A1Cresult', 'age', 'change', 'gender', 'max_glu_serum', 'medical_specialty', 'payer_code', 'race',
'readmitted', 'weight']
def count_unique_values(df):
cat_df = df
return pd.DataFrame({
'columns' : cat_df.columns,
'cardinality' : cat_df.nunique()
}).reset_index(drop = True).sort_values(by = 'cardinality', ascending = False)
count_unique_values(df)_____no_output_____
</code>
## Reduce Dimensionality of the NDC Code Feature_____no_output_____**Question 3**: NDC codes are a common format to represent the wide variety of drugs that are prescribed for patient care in the United States. The challenge is that there are many codes that map to the same or similar drug. You are provided with the ndc drug lookup file https://github.com/udacity/nd320-c1-emr-data-starter/blob/master/project/data_schema_references/ndc_lookup_table.csv derived from the National Drug Codes List site(https://ndclist.com/). Please use this file to come up with a way to reduce the dimensionality of this field and create a new field in the dataset called "generic_drug_name" in the output dataframe. _____no_output_____
<code>
#NDC code lookup file
ndc_code_path = "./medication_lookup_tables/final_ndc_lookup_table"
ndc_code_df = pd.read_csv(ndc_code_path)_____no_output_____from student_utils import reduce_dimension_ndc_____no_output_____def reduce_dimension_ndc(df, ndc_code_df):
'''
df: pandas dataframe, input dataset
ndc_df: pandas dataframe, drug code dataset used for mapping in generic names
return:
df: pandas dataframe, output dataframe with joined generic drug name
'''
mapping = dict(ndc_code_df[['NDC_Code', 'Non-proprietary Name']].values)
mapping['nan'] = np.nan
df['generic_drug_name'] = df['ndc_code'].astype(str).apply(lambda x : mapping[x])
return df
reduce_dim_df = reduce_dimension_ndc(df, ndc_code_df)_____no_output_____reduce_dim_df.head()_____no_output_____# Number of unique values should be less for the new output field
assert df['ndc_code'].nunique() > reduce_dim_df['generic_drug_name'].nunique()
print('Number of ndc_code: ', df['ndc_code'].nunique())
print('Number of drug name: ', reduce_dim_df['generic_drug_name'].nunique())Number of ndc_code: 251
Number of drug name: 22
</code>
## Select First Encounter for each Patient _____no_output_____**Question 4**: In order to simplify the aggregation of data for the model, we will only select the first encounter for each patient in the dataset. This is to reduce the risk of data leakage of future patient encounters and to reduce complexity of the data transformation and modeling steps. We will assume that sorting in numerical order on the encounter_id provides the time horizon for determining which encounters come before and after another._____no_output_____
<code>
def select_first_encounter(df):
'''
df: pandas dataframe, dataframe with all encounters
return:
- first_encounter_df: pandas dataframe, dataframe with only the first encounter for a given patient
'''
    df = df.sort_values(by = 'encounter_id')
first_encounters = df.groupby('patient_nbr')['encounter_id'].first().values
first_encounter_df = df[df['encounter_id'].isin(first_encounters)]
# first_encounter_df = first_encounter_df.groupby('encounter_id').first().reset_index()
return first_encounter_df_____no_output_____first_encounter_df = select_first_encounter(reduce_dim_df)_____no_output_____first_encounter_df.head()_____no_output_____# unique patients in transformed dataset
unique_patients = first_encounter_df['patient_nbr'].nunique()
print("Number of unique patients:{}".format(unique_patients))
# unique encounters in transformed dataset
unique_encounters = first_encounter_df['encounter_id'].nunique()
print("Number of unique encounters:{}".format(unique_encounters))
original_unique_patient_number = reduce_dim_df['patient_nbr'].nunique()
# number of unique patients should be equal to the number of unique encounters and patients in the final dataset
assert original_unique_patient_number == unique_patients
assert original_unique_patient_number == unique_encounters
print("Tests passed!!")Number of unique patients:71518
Number of unique encounters:71518
Tests passed!!
</code>
## Aggregate Dataset to Right Level for Modeling _____no_output_____In order to provide a broad scope of the steps and to prevent students from getting stuck with data transformations, we have selected the aggregation columns and provided a function to build the dataset at the appropriate level. The 'aggregate_dataset" function that you can find in the 'utils.py' file can take the preceding dataframe with the 'generic_drug_name' field and transform the data appropriately for the project.
To make it simpler for students, we are creating dummy columns for each unique generic drug name and adding those are input features to the model. There are other options for data representation but this is out of scope for the time constraints of the course._____no_output_____
<code>
exclusion_list = [ 'generic_drug_name', 'ndc_code']
grouping_field_list = [c for c in first_encounter_df.columns if c not in exclusion_list]
agg_drug_df, ndc_col_list = aggregate_dataset(first_encounter_df, grouping_field_list, 'generic_drug_name')_____no_output_____assert len(agg_drug_df) == agg_drug_df['patient_nbr'].nunique() == agg_drug_df['encounter_id'].nunique()_____no_output_____ndc_col_list_____no_output_____
</code>
## Prepare Fields and Cast Dataset _____no_output_____### Feature Selection_____no_output_____**Question 5**: After you have aggregated the dataset to the right level, we can do feature selection (we will include the ndc_col_list, dummy column features too). In the block below, please select the categorical and numerical features that you will use for the model, so that we can create a dataset subset.
For the payer_code and weight fields, please provide whether you think we should include/exclude the field in our model and give a justification/rationale for this based off of the statistics of the data. Feel free to use visualizations or summary statistics to support your choice._____no_output_____**Student response**: We should exclude both payer_code and weight in our model because of large missing values._____no_output_____
<code>
plt.figure(figsize=(8, 5))
sns.countplot(x = 'payer_code', data = agg_drug_df)_____no_output_____plt.figure(figsize=(8, 5))
sns.countplot(x = 'number_emergency', data = agg_drug_df)_____no_output_____count_unique_values(agg_drug_df[grouping_field_list])_____no_output_____'''
Please update the list to include the features you think are appropriate for the model
and the field that we will be using to train the model. There are three required demographic features for the model
and I have inserted a list with them already in the categorical list.
These will be required for later steps when analyzing data splits and model biases.
'''
required_demo_col_list = ['race', 'gender', 'age']
student_categorical_col_list = [ 'change', 'primary_diagnosis_code'
] + required_demo_col_list + ndc_col_list
student_numerical_col_list = [ 'number_inpatient', 'number_emergency', 'num_lab_procedures', 'number_diagnoses','num_medications','num_procedures']
PREDICTOR_FIELD = 'time_in_hospital'_____no_output_____def select_model_features(df, categorical_col_list, numerical_col_list, PREDICTOR_FIELD, grouping_key='patient_nbr'):
selected_col_list = [grouping_key] + [PREDICTOR_FIELD] + categorical_col_list + numerical_col_list
return agg_drug_df[selected_col_list]
_____no_output_____selected_features_df = select_model_features(agg_drug_df, student_categorical_col_list, student_numerical_col_list,
PREDICTOR_FIELD)_____no_output_____
</code>
### Preprocess Dataset - Casting and Imputing _____no_output_____We will cast and impute the dataset before splitting so that we do not have to repeat these steps across the splits in the next step. For imputing, there can be deeper analysis into which features to impute and how to impute but for the sake of time, we are taking a general strategy of imputing zero for only numerical features.
OPTIONAL: What are some potential issues with this approach? Can you recommend a better way and also implement it?_____no_output_____
<code>
processed_df = preprocess_df(selected_features_df, student_categorical_col_list,
student_numerical_col_list, PREDICTOR_FIELD, categorical_impute_value='nan', numerical_impute_value=0)/home/workspace/starter_code/utils.py:29: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
df[predictor] = df[predictor].astype(float)
/home/workspace/starter_code/utils.py:31: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
df[c] = cast_df(df, c, d_type=str)
/home/workspace/starter_code/utils.py:33: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
df[numerical_column] = impute_df(df, numerical_column, numerical_impute_value)
</code>
## Split Dataset into Train, Validation, and Test Partitions_____no_output_____**Question 6**: In order to prepare the data for being trained and evaluated by a deep learning model, we will split the dataset into three partitions, with the validation partition used for optimizing the model hyperparameters during training. One of the key parts is that we need to be sure that the data does not accidently leak across partitions.
Please complete the function below to split the input dataset into three partitions(train, validation, test) with the following requirements.
- Approximately 60%/20%/20% train/validation/test split
- Randomly sample different patients into each data partition
- **IMPORTANT** Make sure that a patient's data is not in more than one partition, so that we can avoid possible data leakage.
- Make sure that the total number of unique patients across the splits is equal to the total number of unique patients in the original dataset
- Total number of rows in original dataset = sum of rows across all three dataset partitions_____no_output_____
<code>
def patient_dataset_splitter(df, patient_key='patient_nbr'):
'''
df: pandas dataframe, input dataset that will be split
patient_key: string, column that is the patient id
return:
- train: pandas dataframe,
- validation: pandas dataframe,
- test: pandas dataframe,
'''
df[student_numerical_col_list] = df[student_numerical_col_list].astype(float)
train_val_df = df.sample(frac = 0.8, random_state=3)
train_df = train_val_df.sample(frac = 0.8, random_state=3)
val_df = train_val_df.drop(train_df.index)
test_df = df.drop(train_val_df.index)
return train_df.reset_index(drop = True), val_df.reset_index(drop = True), test_df.reset_index(drop = True)_____no_output_____#from student_utils import patient_dataset_splitter
d_train, d_val, d_test = patient_dataset_splitter(processed_df, 'patient_nbr')/root/.local/lib/python3.7/site-packages/pandas/core/frame.py:3509: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
self[k1] = value[k2]
assert len(d_train) + len(d_val) + len(d_test) == len(processed_df)
print("Test passed for number of total rows equal!")Test passed for number of total rows equal!
assert (d_train['patient_nbr'].nunique() + d_val['patient_nbr'].nunique() + d_test['patient_nbr'].nunique()) == agg_drug_df['patient_nbr'].nunique()
print("Test passed for number of unique patients being equal!")Test passed for number of unique patients being equal!
</code>
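Because the aggregated dataset has exactly one row per patient, sampling rows above is equivalent to sampling patients. A hedged alternative that samples patient ids explicitly (illustrative; the function name and the exact 60/20/20 cut points are assumptions) could look like this:
<code>
def split_by_patient(df, patient_key='patient_nbr', seed=3):
    # shuffle the unique patient ids, then cut them roughly 60/20/20
    ids = df[patient_key].drop_duplicates().sample(frac=1.0, random_state=seed).values
    n = len(ids)
    train_ids, val_ids = set(ids[:int(0.6 * n)]), set(ids[int(0.6 * n):int(0.8 * n)])
    train = df[df[patient_key].isin(train_ids)].reset_index(drop=True)
    val = df[df[patient_key].isin(val_ids)].reset_index(drop=True)
    test = df[~df[patient_key].isin(train_ids | val_ids)].reset_index(drop=True)
    return train, val, test
</code>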
## Demographic Representation Analysis of Split_____no_output_____After the split, we should check to see the distribution of key features/groups and make sure that there is representative samples across the partitions. The show_group_stats_viz function in the utils.py file can be used to group and visualize different groups and dataframe partitions._____no_output_____### Label Distribution Across Partitions_____no_output_____Below you can see the distributution of the label across your splits. Are the histogram distribution shapes similar across partitions?_____no_output_____
<code>
show_group_stats_viz(processed_df, PREDICTOR_FIELD)time_in_hospital
1.0 10717
2.0 12397
3.0 12701
4.0 9567
5.0 6839
6.0 5171
7.0 3999
8.0 2919
9.0 1990
10.0 1558
11.0 1241
12.0 955
13.0 795
14.0 669
dtype: int64
AxesSubplot(0.125,0.125;0.775x0.755)
show_group_stats_viz(d_train, PREDICTOR_FIELD)time_in_hospital
1.0 6904
2.0 7978
3.0 8116
4.0 6141
5.0 4341
6.0 3294
7.0 2542
8.0 1846
9.0 1259
10.0 989
11.0 788
12.0 659
13.0 500
14.0 414
dtype: int64
AxesSubplot(0.125,0.125;0.775x0.755)
show_group_stats_viz(d_test, PREDICTOR_FIELD)time_in_hospital
1.0 2159
2.0 2486
3.0 2576
4.0 1842
5.0 1364
6.0 1041
7.0 814
8.0 584
9.0 404
10.0 307
11.0 242
12.0 182
13.0 165
14.0 138
dtype: int64
AxesSubplot(0.125,0.125;0.775x0.755)
</code>
## Demographic Group Analysis_____no_output_____We should check that our partitions/splits of the dataset are similar in terms of their demographic profiles. Below you can see how we might visualize and analyze the full dataset vs. the partitions._____no_output_____
<code>
# Full dataset before splitting
patient_demo_features = ['race', 'gender', 'age', 'patient_nbr']
patient_group_analysis_df = processed_df[patient_demo_features].groupby('patient_nbr').head(1).reset_index(drop=True)
show_group_stats_viz(patient_group_analysis_df, 'gender')gender
Female 38025
Male 33490
Unknown/Invalid 3
dtype: int64
AxesSubplot(0.125,0.125;0.775x0.755)
# Training partition
show_group_stats_viz(d_train, 'gender')gender
Female 24197
Male 21572
Unknown/Invalid 2
dtype: int64
AxesSubplot(0.125,0.125;0.775x0.755)
# Test partition
show_group_stats_viz(d_test, 'gender')gender
Female 7631
Male 6672
Unknown/Invalid 1
dtype: int64
AxesSubplot(0.125,0.125;0.775x0.755)
</code>
## Convert Dataset Splits to TF Dataset_____no_output_____We have provided you the function to convert the Pandas dataframe to TF tensors using the TF Dataset API.
Please note that this is not a scalable method and for larger datasets, the 'make_csv_dataset' method is recommended -https://www.tensorflow.org/api_docs/python/tf/data/experimental/make_csv_dataset._____no_output_____
<code>
# Convert dataset from Pandas dataframes to TF dataset
batch_size = 128
diabetes_train_ds = df_to_dataset(d_train, PREDICTOR_FIELD, batch_size=batch_size)
diabetes_val_ds = df_to_dataset(d_val, PREDICTOR_FIELD, batch_size=batch_size)
diabetes_test_ds = df_to_dataset(d_test, PREDICTOR_FIELD, batch_size=batch_size)_____no_output_____# We use this sample of the dataset to show transformations later
diabetes_batch = next(iter(diabetes_train_ds))[0]
def demo(feature_column, example_batch):
feature_layer = tf.keras.layers.DenseFeatures(feature_column)
print(feature_layer(example_batch))_____no_output_____
</code>
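For larger datasets, the scalable `make_csv_dataset` alternative mentioned above might look roughly like this (a sketch; the path and arguments shown are assumptions, and the raw CSV would still need the same preprocessing that was applied to the dataframes):
<code>
# Illustrative only: stream batches straight from the CSV instead of from an in-memory dataframe
large_ds = tf.data.experimental.make_csv_dataset(
    "./data/final_project_dataset.csv",
    batch_size=batch_size,
    label_name=PREDICTOR_FIELD,
    num_epochs=1,
    shuffle=True,
)
</code>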
# 4. Create Categorical Features with TF Feature Columns_____no_output_____## Build Vocabulary for Categorical Features_____no_output_____Before we can create the TF categorical features, we must first create the vocab files with the unique values for a given field that are from the **training** dataset. Below we have provided a function that you can use that only requires providing the pandas train dataset partition and the list of the categorical columns in a list format. The output variable 'vocab_file_list' will be a list of the file paths that can be used in the next step for creating the categorical features._____no_output_____
<code>
vocab_file_list = build_vocab_files(d_train, student_categorical_col_list)_____no_output_____assert len(vocab_file_list) == len(student_categorical_col_list)_____no_output_____
</code>
## Create Categorical Features with Tensorflow Feature Column API_____no_output_____**Question 7**: Using the vocab file list from above that was derived fromt the features you selected earlier, please create categorical features with the Tensorflow Feature Column API, https://www.tensorflow.org/api_docs/python/tf/feature_column. Below is a function to help guide you._____no_output_____
<code>
def create_tf_categorical_feature_cols(categorical_col_list,
vocab_dir='./diabetes_vocab/'):
'''
categorical_col_list: list, categorical field list that will be transformed with TF feature column
vocab_dir: string, the path where the vocabulary text files are located
return:
output_tf_list: list of TF feature columns
'''
output_tf_list = []
for c in categorical_col_list:
vocab_file_path = os.path.join(vocab_dir, c + "_vocab.txt")
'''
Which TF function allows you to read from a text file and create a categorical feature
You can use a pattern like this below...
tf_categorical_feature_column = tf.feature_column.......
'''
diagnosis_vocab = tf.feature_column.categorical_column_with_vocabulary_file(c, vocab_file_path, num_oov_buckets = 1)
tf_categorical_feature_column = tf.feature_column.indicator_column(diagnosis_vocab)
output_tf_list.append(tf_categorical_feature_column)
return output_tf_list
tf_cat_col_list = create_tf_categorical_feature_cols(student_categorical_col_list)INFO:tensorflow:vocabulary_size = 3 in change is inferred from the number of elements in the vocabulary_file ./diabetes_vocab/change_vocab.txt.
test_cat_var1 = tf_cat_col_list[0]
print("Example categorical field:\n{}".format(test_cat_var1))
demo(test_cat_var1, diabetes_batch)Example categorical field:
IndicatorColumn(categorical_column=VocabularyFileCategoricalColumn(key='change', vocabulary_file='./diabetes_vocab/change_vocab.txt', vocabulary_size=3, num_oov_buckets=1, dtype=tf.string, default_value=-1))
WARNING:tensorflow:Layer dense_features_27 is casting an input tensor from dtype float64 to the layer's dtype of float32, which is new behavior in TensorFlow 2. The layer has dtype float32 because it's dtype defaults to floatx.
If you intended to run this layer in float32, you can safely ignore this warning. If in doubt, this warning is likely only an issue if you are porting a TensorFlow 1.X model to TensorFlow 2.
To change all layers to have dtype float64 by default, call `tf.keras.backend.set_floatx('float64')`. To change just this layer, pass dtype='float64' to the layer constructor. If you are the author of this layer, you can disable autocasting by passing autocast=False to the base Layer constructor.
</code>
# 5. Create Numerical Features with TF Feature Columns_____no_output_____**Question 8**: Using the TF Feature Column API(https://www.tensorflow.org/api_docs/python/tf/feature_column/), please create normalized Tensorflow numeric features for the model. Try to use the z-score normalizer function below to help as well as the 'calculate_stats_from_train_data' function._____no_output_____
<code>
from student_utils import create_tf_numeric_feature
def create_tf_numeric_feature(col, MEAN, STD, default_value=0):
'''
col: string, input numerical column name
MEAN: the mean for the column in the training data
STD: the standard deviation for the column in the training data
default_value: the value that will be used for imputing the field
return:
tf_numeric_feature: tf feature column representation of the input field
'''
normalizer_fn = lambda col, m, s : (col - m) / s
normalizer = partial(normalizer_fn, m = MEAN, s = STD)
tf_numeric_feature = tf.feature_column.numeric_column(col, normalizer_fn = normalizer, dtype = tf.float64,
default_value = default_value)
return tf_numeric_feature_____no_output_____
</code>
For simplicity the create_tf_numerical_feature_cols function below uses the same normalizer function across all features(z-score normalization) but if you have time feel free to analyze and adapt the normalizer based off the statistical distributions. You may find this as a good resource in determining which transformation fits best for the data https://developers.google.com/machine-learning/data-prep/transform/normalization.
_____no_output_____
<code>
def calculate_stats_from_train_data(df, col):
mean = df[col].describe()['mean']
std = df[col].describe()['std']
return mean, std
def create_tf_numerical_feature_cols(numerical_col_list, train_df):
tf_numeric_col_list = []
for c in numerical_col_list:
mean, std = calculate_stats_from_train_data(train_df, c)
tf_numeric_feature = create_tf_numeric_feature(c, mean, std)
tf_numeric_col_list.append(tf_numeric_feature)
return tf_numeric_col_list_____no_output_____tf_cont_col_list = create_tf_numerical_feature_cols(student_numerical_col_list, d_train)_____no_output_____test_cont_var1 = tf_cont_col_list[0]
print("Example continuous field:\n{}\n".format(test_cont_var1))
demo(test_cont_var1, diabetes_batch)Example continuous field:
NumericColumn(key='number_inpatient', shape=(1,), default_value=(0,), dtype=tf.float64, normalizer_fn=functools.partial(<function create_tf_numeric_feature.<locals>.<lambda> at 0x7f7ffe9b6290>, m=0.17600664176006642, s=0.6009985590232482))
WARNING:tensorflow:Layer dense_features_28 is casting an input tensor from dtype float64 to the layer's dtype of float32, which is new behavior in TensorFlow 2. The layer has dtype float32 because it's dtype defaults to floatx.
If you intended to run this layer in float32, you can safely ignore this warning. If in doubt, this warning is likely only an issue if you are porting a TensorFlow 1.X model to TensorFlow 2.
To change all layers to have dtype float64 by default, call `tf.keras.backend.set_floatx('float64')`. To change just this layer, pass dtype='float64' to the layer constructor. If you are the author of this layer, you can disable autocasting by passing autocast=False to the base Layer constructor.
</code>
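A quick sanity check of the z-score normalization (a sketch using the helpers defined above): applying the mean and standard deviation learned from the training partition to that same partition should give roughly zero mean and unit standard deviation.
<code>
# Illustrative only: verify the z-score statistics learned from the training partition
mean, std = calculate_stats_from_train_data(d_train, 'num_lab_procedures')
normalized = (d_train['num_lab_procedures'] - mean) / std
print(round(normalized.mean(), 3), round(normalized.std(), 3))
</code>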
# 6. Build Deep Learning Regression Model with Sequential API and TF Probability Layers_____no_output_____## Use DenseFeatures to combine features for model_____no_output_____Now that we have prepared categorical and numerical features using Tensorflow's Feature Column API, we can combine them into a dense vector representation for the model. Below we will create this new input layer, which we will call 'claim_feature_layer'._____no_output_____
<code>
claim_feature_columns = tf_cat_col_list + tf_cont_col_list
claim_feature_layer = tf.keras.layers.DenseFeatures(claim_feature_columns)_____no_output_____
</code>
## Build Sequential API Model from DenseFeatures and TF Probability Layers_____no_output_____Below we have provided some boilerplate code for building a model that connects the Sequential API, DenseFeatures, and Tensorflow Probability layers into a deep learning model. There are many opportunities to further optimize and explore different architectures through benchmarking and testing approaches in various research papers, loss and evaluation metrics, learning curves, hyperparameter tuning, TF probability layers, etc. Feel free to modify and explore as you wish._____no_output_____**OPTIONAL**: Come up with a more optimal neural network architecture and hyperparameters. Share the process in discovering the architecture and hyperparameters._____no_output_____
<code>
def build_sequential_model(feature_layer):
model = tf.keras.Sequential([
feature_layer,
tf.keras.layers.Dense(512, activation='relu'),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dense(64, activation='relu'),
tfp.layers.DenseVariational(1+1, posterior_mean_field, prior_trainable),
tfp.layers.DistributionLambda(
lambda t:tfp.distributions.Normal(loc=t[..., :1],
scale=1e-3 + tf.math.softplus(0.01 * t[...,1:])
)
),
])
return model
def build_diabetes_model(train_ds, val_ds, feature_layer, epochs=5, loss_metric='mse'):
model = build_sequential_model(feature_layer)
model.compile(optimizer='rmsprop', loss=loss_metric, metrics=[loss_metric])
early_stop = tf.keras.callbacks.EarlyStopping(monitor=loss_metric, patience=3)
model_checkpoint = tf.keras.callbacks.ModelCheckpoint('saved_models/bestmodel.h5', monitor='val_loss', verbose=0, save_best_only=True, mode='auto')
history = model.fit(train_ds, validation_data=val_ds,
callbacks=[early_stop],
epochs=epochs)
return model, history _____no_output_____diabetes_model, history = build_diabetes_model(diabetes_train_ds, diabetes_val_ds, claim_feature_layer, epochs=20)Train for 358 steps, validate for 90 steps
Epoch 1/20
358/358 [==============================] - 12s 33ms/step - loss: 25.3230 - mse: 25.1798 - val_loss: 22.8669 - val_mse: 22.5741
Epoch 2/20
358/358 [==============================] - 13s 36ms/step - loss: 15.7285 - mse: 15.1443 - val_loss: 13.9822 - val_mse: 13.2914
Epoch 3/20
358/358 [==============================] - 13s 37ms/step - loss: 12.7352 - mse: 11.9670 - val_loss: 11.7411 - val_mse: 11.0843
Epoch 4/20
358/358 [==============================] - 13s 35ms/step - loss: 11.2485 - mse: 10.4313 - val_loss: 10.4873 - val_mse: 9.5640
Epoch 5/20
358/358 [==============================] - 13s 35ms/step - loss: 10.3242 - mse: 9.5285 - val_loss: 9.8177 - val_mse: 9.1636
Epoch 6/20
358/358 [==============================] - 8s 24ms/step - loss: 10.2267 - mse: 9.4504 - val_loss: 10.4423 - val_mse: 9.6653
Epoch 7/20
358/358 [==============================] - 13s 35ms/step - loss: 9.2538 - mse: 8.3019 - val_loss: 9.8968 - val_mse: 9.1000
Epoch 8/20
358/358 [==============================] - 13s 37ms/step - loss: 8.9934 - mse: 8.1093 - val_loss: 8.8550 - val_mse: 8.1466
Epoch 9/20
358/358 [==============================] - 13s 36ms/step - loss: 8.7026 - mse: 7.9876 - val_loss: 9.2839 - val_mse: 8.8448
Epoch 10/20
358/358 [==============================] - 13s 37ms/step - loss: 8.5294 - mse: 7.6984 - val_loss: 8.0266 - val_mse: 7.2838
Epoch 11/20
358/358 [==============================] - 8s 23ms/step - loss: 8.5896 - mse: 7.7653 - val_loss: 7.9876 - val_mse: 7.3438
Epoch 12/20
358/358 [==============================] - 8s 23ms/step - loss: 8.1592 - mse: 7.3578 - val_loss: 8.5204 - val_mse: 7.5046
Epoch 13/20
358/358 [==============================] - 8s 23ms/step - loss: 8.1387 - mse: 7.3121 - val_loss: 8.0225 - val_mse: 7.3049
Epoch 14/20
358/358 [==============================] - 8s 23ms/step - loss: 7.8314 - mse: 7.0780 - val_loss: 7.9314 - val_mse: 7.0396
Epoch 15/20
358/358 [==============================] - 10s 27ms/step - loss: 7.5737 - mse: 6.7585 - val_loss: 7.8283 - val_mse: 7.0017
Epoch 16/20
358/358 [==============================] - 8s 21ms/step - loss: 7.5256 - mse: 6.8017 - val_loss: 7.5964 - val_mse: 6.9889
Epoch 17/20
358/358 [==============================] - 8s 21ms/step - loss: 7.5827 - mse: 6.7757 - val_loss: 7.8556 - val_mse: 7.1059
Epoch 18/20
358/358 [==============================] - 8s 21ms/step - loss: 7.4014 - mse: 6.5900 - val_loss: 7.4390 - val_mse: 6.5678
Epoch 19/20
358/358 [==============================] - 8s 22ms/step - loss: 7.3756 - mse: 6.5670 - val_loss: 7.4403 - val_mse: 6.6895
Epoch 20/20
358/358 [==============================] - 8s 22ms/step - loss: 7.1834 - mse: 6.3069 - val_loss: 7.8813 - val_mse: 7.2031
</code>
## Show Model Uncertainty Range with TF Probability_____no_output_____**Question 9**: Now that we have trained a model with TF Probability layers, we can extract the mean and standard deviation for each prediction. Please fill in the answer for the m and s variables below. The code for getting the predictions is provided for you below._____no_output_____
<code>
feature_list = student_categorical_col_list + student_numerical_col_list
diabetes_x_tst = dict(d_test[feature_list])
diabetes_yhat = diabetes_model(diabetes_x_tst)
preds = diabetes_model.predict(diabetes_test_ds)_____no_output_____def get_mean_std_from_preds(diabetes_yhat):
'''
diabetes_yhat: TF Probability prediction object
'''
m = diabetes_yhat.mean()
s = diabetes_yhat.stddev()
return m, s
m, s = get_mean_std_from_preds(diabetes_yhat)_____no_output_____
</code>
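For reference, the ability to call `.mean()` and `.stddev()` on `diabetes_yhat` above comes from the model having a probabilistic output head. Below is a minimal sketch of how such a head is commonly wired with TF Probability; the layer sizes and the `softplus` scaling are illustrative assumptions, not the exact architecture used in this notebook.

<code>
import tensorflow as tf
import tensorflow_probability as tfp

def probabilistic_head(hidden):
    # Two outputs per example: one for the location, one for the (raw) scale.
    params = tf.keras.layers.Dense(2)(hidden)
    # DistributionLambda wraps the parameters into a Normal distribution,
    # so calling the model returns an object exposing .mean() and .stddev().
    return tfp.layers.DistributionLambda(
        lambda t: tfp.distributions.Normal(
            loc=t[..., :1],
            scale=1e-3 + tf.math.softplus(0.05 * t[..., 1:])))(params)
</code>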
## Show Prediction Output _____no_output_____
<code>
prob_outputs = {
"pred": preds.flatten(),
"actual_value": d_test['time_in_hospital'].values,
"pred_mean": m.numpy().flatten(),
"pred_std": s.numpy().flatten()
}
prob_output_df = pd.DataFrame(prob_outputs)_____no_output_____prob_output_df.head()_____no_output_____
</code>
## Convert Regression Output to Classification Output for Patient Selection_____no_output_____**Question 10**: Given the output predictions, convert it to a binary label for whether the patient meets the time criteria or does not (HINT: use the mean prediction numpy array). The expected output is a numpy array with a 1 or 0 based off if the prediction meets or doesnt meet the criteria._____no_output_____
<code>
def get_student_binary_prediction(df, col):
'''
df: pandas dataframe prediction output dataframe
col: str, probability mean prediction field
return:
student_binary_prediction: pandas dataframe converting input to flattened numpy array and binary labels
'''
student_binary_prediction = df[col].apply(lambda x : 1 if x >= 5 else 0)
return student_binary_prediction
student_binary_prediction = get_student_binary_prediction(prob_output_df, 'pred_mean')_____no_output_____
</code>
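Since the docstring above mentions a flattened numpy array, an equivalent fully vectorized version (assuming the same 5-day threshold) could look like this:

<code>
import numpy as np

def get_binary_prediction_np(df, col, threshold=5):
    # Threshold the mean predictions and return a flat numpy array of 0/1 labels.
    return (df[col].to_numpy() >= threshold).astype(int)

# Hypothetical usage, equivalent to the apply() version above:
# student_binary_prediction = get_binary_prediction_np(prob_output_df, 'pred_mean')
</code>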
### Add Binary Prediction to Test Dataframe_____no_output_____The student_binary_prediction output is a numpy array of binary labels. We can add it to the test dataframe to better visualize the results and to prepare the data for the Aequitas toolkit, which requires a binary label for the predictions (the 'score' field) and for the actual value (the 'label_value' field)._____no_output_____
<code>
def add_pred_to_test(test_df, pred_np, demo_col_list):
for c in demo_col_list:
test_df[c] = test_df[c].astype(str)
test_df['score'] = pred_np
test_df['label_value'] = test_df['time_in_hospital'].apply(lambda x: 1 if x >=5 else 0)
return test_df
pred_test_df = add_pred_to_test(d_test, student_binary_prediction, ['race', 'gender'])_____no_output_____pred_test_df[['patient_nbr', 'gender', 'race', 'time_in_hospital', 'score', 'label_value']].head()_____no_output_____
</code>
## Model Evaluation Metrics _____no_output_____**Question 11**: Now it is time to use the newly created binary labels in the 'pred_test_df' dataframe to evaluate the model with some common classification metrics. Please create a report summary of the performance of the model and be sure to give the ROC AUC, F1 score (weighted), and class precision and recall scores. _____no_output_____For the report please be sure to include the following parts:
- With a non-technical audience in mind, explain the precision-recall tradeoff in regard to how you have optimized your model.
- What are some areas of improvement for future iterations?_____no_output_____### Precision-Recall Tradeoff
* A tradeoff means that increasing one metric leads to a decrease in the other.
* Precision is the fraction of correct positives among all predicted positives.
* Recall is the fraction of correct positives among all actual positives in the dataset.
* The precision-recall tradeoff arises because, for a fixed model, raising the decision threshold increases precision at the cost of recall, and lowering it does the opposite (see the threshold sweep sketched below).
### Improvements
* Precision and recall still leave room for improvement (e.g. precision for the positive class is only 0.61), so future iterations could focus on raising these scores._____no_output_____
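One way to make the tradeoff concrete is to sweep the decision threshold over the predicted mean length of stay instead of fixing it at 5 days. The sketch below assumes `pred_test_df` and the TF Probability means `m` defined earlier are available:

<code>
from sklearn.metrics import precision_recall_curve

# Precision and recall as a function of the threshold applied to the
# predicted mean length of stay: raising the threshold tends to raise
# precision and lower recall, and vice versa.
precisions, recalls, thresholds = precision_recall_curve(
    pred_test_df['label_value'], m.numpy().flatten())
for prec, rec, thr in zip(precisions[:-1:10], recalls[:-1:10], thresholds[::10]):
    print(f"threshold={thr:5.2f}  precision={prec:.2f}  recall={rec:.2f}")
</code>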
<code>
# AUC, F1, precision and recall
# Summary
print(classification_report(pred_test_df['label_value'], pred_test_df['score'])) precision recall f1-score support
0 0.85 0.71 0.77 9063
1 0.61 0.78 0.68 5241
accuracy 0.74 14304
macro avg 0.73 0.75 0.73 14304
weighted avg 0.76 0.74 0.74 14304
f1_score(pred_test_df['label_value'], pred_test_df['score'], average='weighted')_____no_output_____accuracy_score(pred_test_df['label_value'], pred_test_df['score'])_____no_output_____roc_auc_score(pred_test_df['label_value'], pred_test_df['score'])_____no_output_____precision_score(pred_test_df['label_value'], pred_test_df['score'])_____no_output_____recall_score(pred_test_df['label_value'], pred_test_df['score'])_____no_output_____
</code>
# 7. Evaluating Potential Model Biases with Aequitas Toolkit_____no_output_____## Prepare Data For Aequitas Bias Toolkit _____no_output_____Using the gender and race fields, we will prepare the data for the Aequitas Toolkit._____no_output_____
<code>
# Aequitas
from aequitas.preprocessing import preprocess_input_df
from aequitas.group import Group
from aequitas.plotting import Plot
from aequitas.bias import Bias
from aequitas.fairness import Fairness
ae_subset_df = pred_test_df[['race', 'gender', 'score', 'label_value']]
ae_df, _ = preprocess_input_df(ae_subset_df)
g = Group()
xtab, _ = g.get_crosstabs(ae_df)
absolute_metrics = g.list_absolute_metrics(xtab)
clean_xtab = xtab.fillna(-1)
aqp = Plot()
b = Bias()
/opt/conda/lib/python3.7/site-packages/aequitas/group.py:143: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
df['score'] = df['score'].astype(float)
</code>
## Reference Group Selection_____no_output_____Below we have chosen the reference group for our analysis but feel free to select another one._____no_output_____
<code>
# test reference group with Caucasian Male
bdf = b.get_disparity_predefined_groups(clean_xtab,
original_df=ae_df,
ref_groups_dict={'race':'Caucasian', 'gender':'Male'
},
alpha=0.05,
check_significance=False)
f = Fairness()
fdf = f.get_group_value_fairness(bdf)get_disparity_predefined_group()
</code>
## Race and Gender Bias Analysis for Patient Selection_____no_output_____**Question 12**: For the gender and race fields, please plot two metrics that are important for patient selection below and state whether there is a significant bias in your model across any of the groups along with justification for your statement._____no_output_____
<code>
# Plot two metrics
# Is there significant bias in your model for either race or gender?_____no_output_____aqp.plot_group_metric(clean_xtab, 'fpr', min_group_size=0.05)_____no_output_____aqp.plot_group_metric(clean_xtab, 'tpr', min_group_size=0.05)_____no_output_____aqp.plot_group_metric(clean_xtab, 'fnr', min_group_size=0.05)_____no_output_____aqp.plot_group_metric(clean_xtab, 'tnr', min_group_size=0.05)_____no_output_____
</code>
#### There isn't any significant bias in the model for either race or gender._____no_output_____## Fairness Analysis Example - Relative to a Reference Group _____no_output_____**Question 13**: Earlier we defined our reference group and then calculated disparity metrics relative to this grouping. Please provide a visualization of the fairness evaluation for this reference group and analyze whether there is disparity._____no_output_____
<code>
# Reference group fairness plot
aqp.plot_fairness_disparity(bdf, group_metric='fnr', attribute_name='race', significance_alpha=0.05, min_group_size=0.05)_____no_output_____aqp.plot_fairness_disparity(fdf, group_metric='fnr', attribute_name='gender', significance_alpha=0.05, min_group_size=0.05)_____no_output_____aqp.plot_fairness_disparity(fdf, group_metric='fpr', attribute_name='race', significance_alpha=0.05, min_group_size=0.05)_____no_output_____
</code>
#### There isn't any disparity in the model for either race or gender._____no_output_____
<code>
aqp.plot_fairness_group(fdf, group_metric='fpr', title=True, min_group_size=0.05)_____no_output_____aqp.plot_fairness_group(fdf, group_metric='fnr', title=True)_____no_output_____
</code>
#### Nearly all race and gender groups have a similar probability of being falsely not identified (i.e., similar false negative rates). The model appears unbiased with respect to race and gender._____no_output_____
| {
"repository": "aaryapatel007/Patient-Selection-for-Diabetes-Drug-Testing",
"path": "code/student_project.ipynb",
"matched_keywords": [
"clinical trials"
],
"stars": null,
"size": 609304,
"hexsha": "d095e1897b6fe283c2ac4c3b5380cd054b65d3f3",
"max_line_length": 95703,
"avg_line_length": 125.3969952665,
"alphanum_fraction": 0.815304019
} |
# Notebook from arinmuk/python_apis
Path: 1/Activities/04-Stu_FarFarAway/Solved/.ipynb_checkpoints/faraway-arm-checkpoint.ipynb
<code>
# Dependencies
import requests
import json_____no_output_____# URL for GET requests to retrieve Star Wars character data
base_url = "https://swapi.co/api/people/"_____no_output_____# Create a url with a specific character id
character_id = '4'
url = base_url + character_id
print(url)https://swapi.co/api/people/4
# Perform a get request for this character
response = requests.get(url)
print(response.url)https://swapi.co/api/people/4/
# Storing the JSON response within a variable
data = response.json()
print(json.dumps(data, indent=4, sort_keys=True)){
"birth_year": "41.9BBY",
"created": "2014-12-10T15:18:20.704000Z",
"edited": "2014-12-20T21:17:50.313000Z",
"eye_color": "yellow",
"films": [
"https://swapi.co/api/films/2/",
"https://swapi.co/api/films/6/",
"https://swapi.co/api/films/3/",
"https://swapi.co/api/films/1/"
],
"gender": "male",
"hair_color": "none",
"height": "202",
"homeworld": "https://swapi.co/api/planets/1/",
"mass": "136",
"name": "Darth Vader",
"skin_color": "white",
"species": [
"https://swapi.co/api/species/1/"
],
"starships": [
"https://swapi.co/api/starships/13/"
],
"url": "https://swapi.co/api/people/4/",
"vehicles": []
}
# Collecting the name of the character collected
character_name = data["name"]_____no_output_____# Counting how many films the character was in
film_number = len(data["films"])_____no_output_____# Figure out what their first starship was
first_ship_url = data["starships"][0]
ship_response = requests.get(first_ship_url).json()
ship_response_____no_output_____first_ship = ship_response["name"]_____no_output_____# Print character name and how many films they were in
print(f"{character_name} was in {film_number} films")Darth Vader was in 4 films
# Print what their first ship was
print(f"Their first ship: {first_ship}")Their first ship: TIE Advanced x1
# BONUS
films = []
for film in data['films']:
cur_film = requests.get(film).json()
film_title = cur_film["title"]
films.append(film_title)
print(f"{character_name} was in:")
print(films)Darth Vader was in:
['The Empire Strikes Back', 'Revenge of the Sith', 'Return of the Jedi', 'A New Hope']
</code>
| {
"repository": "arinmuk/python_apis",
"path": "1/Activities/04-Stu_FarFarAway/Solved/.ipynb_checkpoints/faraway-arm-checkpoint.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 7092,
"hexsha": "d09745cd35142f81a959979d4e5374c43663015d",
"max_line_length": 96,
"avg_line_length": 22.8038585209,
"alphanum_fraction": 0.4827975183
} |
# Notebook from gr8khan/d2lai
Path: d2l-en/mxnet/chapter_appendix-mathematics-for-deep-learning/random-variables.ipynb
# Random Variables
:label:`sec_random_variables`
In :numref:`sec_prob` we saw the basics of how to work with discrete random variables, which in our case refer to those random variables which take either a finite set of possible values, or the integers. In this section, we develop the theory of *continuous random variables*, which are random variables which can take on any real value.
## Continuous Random Variables
Continuous random variables are a significantly more subtle topic than discrete random variables. A fair analogy to make is that the technical jump is comparable to the jump between adding lists of numbers and integrating functions. As such, we will need to take some time to develop the theory.
### From Discrete to Continuous
To understand the additional technical challenges encountered when working with continuous random variables, let us perform a thought experiment. Suppose that we are throwing a dart at the dart board, and we want to know the probability that it hits exactly $2 \text{cm}$ from the center of the board.
To start with, we imagine measuring a single digit of accuracy, that is to say with bins for $0 \text{cm}$, $1 \text{cm}$, $2 \text{cm}$, and so on. We throw say $100$ darts at the dart board, and if $20$ of them fall into the bin for $2\text{cm}$ we conclude that $20\%$ of the darts we throw hit the board $2 \text{cm}$ away from the center.
However, when we look closer, this does not match our question! We wanted exact equality, whereas these bins hold all that fell between say $1.5\text{cm}$ and $2.5\text{cm}$.
Undeterred, we continue further. We measure even more precisely, say $1.9\text{cm}$, $2.0\text{cm}$, $2.1\text{cm}$, and now see that perhaps $3$ of the $100$ darts hit the board in the $2.0\text{cm}$ bucket. Thus we conclude the probability is $3\%$.
However, this does not solve anything! We have just pushed the issue down one digit further. Let us abstract a bit. Imagine we know the probability that the first $k$ digits match with $2.00000\ldots$ and we want to know the probability it matches for the first $k+1$ digits. It is fairly reasonable to assume that the ${k+1}^{\mathrm{th}}$ digit is essentially a random choice from the set $\{0, 1, 2, \ldots, 9\}$. At least, we cannot conceive of a physically meaningful process which would force the number of micrometers away from the center to prefer to end in a $7$ vs a $3$.
What this means is that in essence each additional digit of accuracy we require should decrease the probability of matching by a factor of $10$. Or put another way, we would expect that
$$
P(\text{distance is}\; 2.00\ldots, \;\text{to}\; k \;\text{digits} ) \approx p\cdot10^{-k}.
$$
The value $p$ essentially encodes what happens with the first few digits, and the $10^{-k}$ handles the rest.
Notice that if we know the position accurately to $k=4$ digits after the decimal, that means we know the value falls within the interval, say, $(1.99995, 2.00005]$, which is an interval of length $2.00005-1.99995 = 10^{-4}$. Thus, if we call the length of this interval $\epsilon$, we can say
$$
P(\text{distance is in an}\; \epsilon\text{-sized interval around}\; 2 ) \approx \epsilon \cdot p.
$$
Let us take this one final step further. We have been thinking about the point $2$ the entire time, but never thinking about other points. Nothing is different there fundamentally, but it is the case that the value $p$ will likely be different. We would at least hope that a dart thrower was more likely to hit a point near the center, like $2\text{cm}$ rather than $20\text{cm}$. Thus, the value $p$ is not fixed, but rather should depend on the point $x$. This tells us that we should expect
$$P(\text{distance is in an}\; \epsilon \text{-sized interval around}\; x ) \approx \epsilon \cdot p(x).$$
:eqlabel:`eq_pdf_deriv`
Indeed, :eqref:`eq_pdf_deriv` precisely defines the *probability density function*. It is a function $p(x)$ which encodes the relative probability of hitting near one point vs. another. Let us visualize what such a function might look like.
_____no_output_____
<code>
%matplotlib inline
from d2l import mxnet as d2l
from IPython import display
from mxnet import np, npx
npx.set_np()
# Plot the probability density function for some random variable
x = np.arange(-5, 5, 0.01)
p = 0.2*np.exp(-(x - 3)**2 / 2)/np.sqrt(2 * np.pi) + \
0.8*np.exp(-(x + 1)**2 / 2)/np.sqrt(2 * np.pi)
d2l.plot(x, p, 'x', 'Density')_____no_output_____
</code>
The locations where the function value is large indicates regions where we are more likely to find the random value. The low portions are areas where we are unlikely to find the random value.
### Probability Density Functions
Let us now investigate this further. We have already seen what a probability density function is intuitively for a random variable $X$, namely the density function is a function $p(x)$ so that
$$P(X \; \text{is in an}\; \epsilon \text{-sized interval around}\; x ) \approx \epsilon \cdot p(x).$$
:eqlabel:`eq_pdf_def`
But what does this imply for the properties of $p(x)$?
First, probabilities are never negative, thus we should expect that $p(x) \ge 0$ as well.
Second, let us imagine that we slice up the $\mathbb{R}$ into an infinite number of slices which are $\epsilon$ wide, say with slices $(\epsilon\cdot i, \epsilon \cdot (i+1)]$. For each of these, we know from :eqref:`eq_pdf_def` the probability is approximately
$$
P(X \; \text{is in an}\; \epsilon\text{-sized interval around}\; x ) \approx \epsilon \cdot p(\epsilon \cdot i),
$$
so summed over all of them it should be
$$
P(X\in\mathbb{R}) \approx \sum_i \epsilon \cdot p(\epsilon\cdot i).
$$
This is nothing more than the approximation of an integral discussed in :numref:`sec_integral_calculus`, thus we can say that
$$
P(X\in\mathbb{R}) = \int_{-\infty}^{\infty} p(x) \; dx.
$$
We know that $P(X\in\mathbb{R}) = 1$, since the random variable must take on *some* number, we can conclude that for any density
$$
\int_{-\infty}^{\infty} p(x) \; dx = 1.
$$
Indeed, digging into this further shows that for any $a$, and $b$, we see that
$$
P(X\in(a, b]) = \int _ {a}^{b} p(x) \; dx.
$$
We may approximate this in code by using the same discrete approximation methods as before. In this case we can approximate the probability of falling in the blue region.
_____no_output_____
<code>
# Approximate probability using numerical integration
epsilon = 0.01
x = np.arange(-5, 5, 0.01)
p = 0.2*np.exp(-(x - 3)**2 / 2) / np.sqrt(2 * np.pi) + \
0.8*np.exp(-(x + 1)**2 / 2) / np.sqrt(2 * np.pi)
d2l.set_figsize()
d2l.plt.plot(x, p, color='black')
d2l.plt.fill_between(x.tolist()[300:800], p.tolist()[300:800])
d2l.plt.show()
f'approximate Probability: {np.sum(epsilon*p[300:800])}'_____no_output_____
</code>
It turns out that these two properties describe exactly the space of possible probability density functions (or *p.d.f.*'s for the commonly encountered abbreviation). They are non-negative functions $p(x) \ge 0$ such that
$$\int_{-\infty}^{\infty} p(x) \; dx = 1.$$
:eqlabel:`eq_pdf_int_one`
We interpret this function by using integration to obtain the probability our random variable is in a specific interval:
$$P(X\in(a, b]) = \int _ {a}^{b} p(x) \; dx.$$
:eqlabel:`eq_pdf_int_int`
In :numref:`sec_distributions` we will see a number of common distributions, but let us continue working in the abstract.
### Cumulative Distribution Functions
In the previous section, we saw the notion of the p.d.f. In practice, this is a commonly encountered method to discuss continuous random variables, but it has one significant pitfall: that the values of the p.d.f. are not themselves probabilities, but rather a function that we must integrate to yield probabilities. There is nothing wrong with a density being larger than $10$, as long as it is not larger than $10$ for more than an interval of length $1/10$. This can be counter-intuitive, so people often also think in terms of the *cumulative distribution function*, or c.d.f., which *is* a probability.
In particular, by using :eqref:`eq_pdf_int_int`, we define the c.d.f. for a random variable $X$ with density $p(x)$ by
$$
F(x) = \int _ {-\infty}^{x} p(x) \; dx = P(X \le x).
$$
Let us observe a few properties.
* $F(x) \rightarrow 0$ as $x\rightarrow -\infty$.
* $F(x) \rightarrow 1$ as $x\rightarrow \infty$.
* $F(x)$ is non-decreasing ($y > x \implies F(y) \ge F(x)$).
* $F(x)$ is continuous (has no jumps) if $X$ is a continuous random variable.
With the fourth bullet point, note that this would not be true if $X$ were discrete, say taking the values $0$ and $1$ both with probability $1/2$. In that case
$$
F(x) = \begin{cases}
0 & x < 0, \\
\frac{1}{2} & 0 \le x < 1, \\
1 & x \ge 1.
\end{cases}
$$
In this example, we see one of the benefits of working with the c.d.f., the ability to deal with continuous or discrete random variables in the same framework, or indeed mixtures of the two (flip a coin: if heads return the roll of a die, if tails return the distance of a dart throw from the center of a dart board).
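The c.d.f. properties listed above can also be checked numerically. The sketch below uses plain NumPy (rather than the mxnet-backed `np` used elsewhere in this chapter) to build an approximate c.d.f. of the mixture density plotted earlier by accumulating the discretized p.d.f.:

<code>
import numpy as np

# Discretize the same mixture-of-Gaussians density as above and accumulate it.
epsilon = 0.01
x = np.arange(-5, 5, epsilon)
p = 0.2 * np.exp(-(x - 3)**2 / 2) / np.sqrt(2 * np.pi) + \
    0.8 * np.exp(-(x + 1)**2 / 2) / np.sqrt(2 * np.pi)
F = np.cumsum(p) * epsilon

print(F[0], F[-1])              # close to 0 on the left, close to 1 on the right
print(np.all(np.diff(F) >= 0))  # the c.d.f. is non-decreasing
</code>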
### Means
Suppose that we are dealing with a random variable $X$. The distribution itself can be hard to interpret. It is often useful to be able to summarize the behavior of a random variable concisely. Numbers that help us capture the behavior of a random variable are called *summary statistics*. The most commonly encountered ones are the *mean*, the *variance*, and the *standard deviation*.
The *mean* encodes the average value of a random variable. If we have a discrete random variable $X$, which takes the values $x_i$ with probabilities $p_i$, then the mean is given by the weighted average: sum the values times the probability that the random variable takes on that value:
$$\mu_X = E[X] = \sum_i x_i p_i.$$
:eqlabel:`eq_exp_def`
The way we should interpret the mean (albeit with caution) is that it tells us essentially where the random variable tends to be located.
As a minimalistic example that we will examine throughout this section, let us take $X$ to be the random variable which takes the value $a-2$ with probability $p$, $a+2$ with probability $p$ and $a$ with probability $1-2p$. We can compute using :eqref:`eq_exp_def` that, for any possible choice of $a$ and $p$, the mean is
$$
\mu_X = E[X] = \sum_i x_i p_i = (a-2)p + a(1-2p) + (a+2)p = a.
$$
Thus we see that the mean is $a$. This matches the intuition since $a$ is the location around which we centered our random variable.
Because they are helpful, let us summarize a few properties.
* For any random variable $X$ and numbers $a$ and $b$, we have that $\mu_{aX+b} = a\mu_X + b$.
* If we have two random variables $X$ and $Y$, we have $\mu_{X+Y} = \mu_X+\mu_Y$.
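Both properties are easy to confirm by sampling. A quick check with plain NumPy (the distributions and constants below are arbitrary choices for illustration):

<code>
import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(1.0, 2.0, 100_000)
Y = rng.exponential(3.0, 100_000)
a, b = 2.5, -1.0

print(np.mean(a * X + b), a * np.mean(X) + b)   # mu_{aX+b} = a mu_X + b
print(np.mean(X + Y), np.mean(X) + np.mean(Y))  # mu_{X+Y} = mu_X + mu_Y
</code>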
Means are useful for understanding the average behavior of a random variable, however the mean is not sufficient to even have a full intuitive understanding. Making a profit of $\$10 \pm \$1$ per sale is very different from making $\$10 \pm \$15$ per sale despite having the same average value. The second one has a much larger degree of fluctuation, and thus represents a much larger risk. Thus, to understand the behavior of a random variable, we will need at minimum one more measure: some measure of how widely a random variable fluctuates.
### Variances
This leads us to consider the *variance* of a random variable. This is a quantitative measure of how far a random variable deviates from the mean. Consider the expression $X - \mu_X$. This is the deviation of the random variable from its mean. This value can be positive or negative, so we need to do something to make it positive so that we are measuring the magnitude of the deviation.
A reasonable thing to try is to look at $\left|X-\mu_X\right|$, and indeed this leads to a useful quantity called the *mean absolute deviation*, however due to connections with other areas of mathematics and statistics, people often use a different solution.
In particular, they look at $(X-\mu_X)^2.$ If we look at the typical size of this quantity by taking the mean, we arrive at the variance
$$\sigma_X^2 = \mathrm{Var}(X) = E\left[(X-\mu_X)^2\right] = E[X^2] - \mu_X^2.$$
:eqlabel:`eq_var_def`
The last equality in :eqref:`eq_var_def` holds by expanding out the definition in the middle, and applying the properties of expectation.
Let us look at our example where $X$ is the random variable which takes the value $a-2$ with probability $p$, $a+2$ with probability $p$ and $a$ with probability $1-2p$. In this case $\mu_X = a$, so all we need to compute is $E\left[X^2\right]$. This can readily be done:
$$
E\left[X^2\right] = (a-2)^2p + a^2(1-2p) + (a+2)^2p = a^2 + 8p.
$$
Thus, we see that by :eqref:`eq_var_def` our variance is
$$
\sigma_X^2 = \mathrm{Var}(X) = E[X^2] - \mu_X^2 = a^2 + 8p - a^2 = 8p.
$$
This result again makes sense. The largest $p$ can be is $1/2$ which corresponds to picking $a-2$ or $a+2$ with a coin flip. The variance of this being $4$ corresponds to the fact that both $a-2$ and $a+2$ are $2$ units away from the mean, and $2^2 = 4$. On the other end of the spectrum, if $p=0$, this random variable always takes the value $a$ and so it has no variance at all.
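A quick simulation of this three-point random variable confirms the $8p$ formula. The sketch below uses plain NumPy with arbitrary choices of $a$ and $p$:

<code>
import numpy as np

a, p = 0.3, 0.2
rng = np.random.default_rng(0)
X = rng.choice([a - 2, a, a + 2], size=200_000, p=[p, 1 - 2 * p, p])

print(np.var(X), 8 * p)  # empirical variance vs. the closed form 8p
</code>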
We will list a few properties of variance below:
* For any random variable $X$, $\mathrm{Var}(X) \ge 0$, with $\mathrm{Var}(X) = 0$ if and only if $X$ is a constant.
* For any random variable $X$ and numbers $a$ and $b$, we have that $\mathrm{Var}(aX+b) = a^2\mathrm{Var}(X)$.
* If we have two *independent* random variables $X$ and $Y$, we have $\mathrm{Var}(X+Y) = \mathrm{Var}(X) + \mathrm{Var}(Y)$.
When interpreting these values, there can be a bit of a hiccup. In particular, let us try imagining what happens if we keep track of units through this computation. Suppose that we are working with the star rating assigned to a product on the web page. Then $a$, $a-2$, and $a+2$ are all measured in units of stars. Similarly, the mean $\mu_X$ is then also measured in stars (being a weighted average). However, if we get to the variance, we immediately encounter an issue, which is we want to look at $(X-\mu_X)^2$, which is in units of *squared stars*. This means that the variance itself is not comparable to the original measurements. To make it interpretable, we will need to return to our original units.
### Standard Deviations
This summary statistic can always be deduced from the variance by taking the square root! Thus we define the *standard deviation* to be
$$
\sigma_X = \sqrt{\mathrm{Var}(X)}.
$$
In our example, this means we now have the standard deviation is $\sigma_X = 2\sqrt{2p}$. If we are dealing with units of stars for our review example, $\sigma_X$ is again in units of stars.
The properties we had for the variance can be restated for the standard deviation.
* For any random variable $X$, $\sigma_{X} \ge 0$.
* For any random variable $X$ and numbers $a$ and $b$, we have that $\sigma_{aX+b} = |a|\sigma_{X}$
* If we have two *independent* random variables $X$ and $Y$, we have $\sigma_{X+Y} = \sqrt{\sigma_{X}^2 + \sigma_{Y}^2}$.
It is natural at this moment to ask, "If the standard deviation is in the units of our original random variable, does it tell us something meaningful about that random variable?" The answer is a resounding yes! Indeed much like the mean told us the typical location of our random variable, the standard deviation gives the typical range of variation of that random variable. We can make this rigorous with what is known as Chebyshev's inequality:
$$P\left(X \not\in [\mu_X - \alpha\sigma_X, \mu_X + \alpha\sigma_X]\right) \le \frac{1}{\alpha^2}.$$
:eqlabel:`eq_chebyshev`
Or to state it verbally in the case of $\alpha=10$, $99\%$ of the samples from any random variable fall within $10$ standard deviations of the mean. This gives an immediate interpretation to our standard summary statistics.
To see how this statement is rather subtle, let us take a look at our running example again where $X$ is the random variable which takes the value $a-2$ with probability $p$, $a+2$ with probability $p$ and $a$ with probability $1-2p$. We saw that the mean was $a$ and the standard deviation was $2\sqrt{2p}$. This means, if we take Chebyshev's inequality :eqref:`eq_chebyshev` with $\alpha = 2$, we see that the expression is
$$
P\left(X \not\in [a - 4\sqrt{2p}, a + 4\sqrt{2p}]\right) \le \frac{1}{4}.
$$
This means that $75\%$ of the time, this random variable will fall within this interval for any value of $p$. Now, notice that as $p \rightarrow 0$, this interval also converges to the single point $a$. But we know that our random variable takes the values $a-2, a$, and $a+2$ only so eventually we can be certain $a-2$ and $a+2$ will fall outside the interval! The question is, at what $p$ does that happen. So we want to solve: for what $p$ does $a+4\sqrt{2p} = a+2$, which is solved when $p=1/8$, which is *exactly* the first $p$ where it could possibly happen without violating our claim that no more than $1/4$ of samples from the distribution would fall outside the interval ($1/8$ to the left, and $1/8$ to the right).
Let us visualize this. We will show the probability of getting the three values as three vertical bars with height proportional to the probability. The interval will be drawn as a horizontal line in the middle. The first plot shows what happens for $p > 1/8$ where the interval safely contains all points.
_____no_output_____
<code>
# Define a helper to plot these figures
def plot_chebyshev(a, p):
d2l.set_figsize()
d2l.plt.stem([a-2, a, a+2], [p, 1-2*p, p], use_line_collection=True)
d2l.plt.xlim([-4, 4])
d2l.plt.xlabel('x')
d2l.plt.ylabel('p.m.f.')
d2l.plt.hlines(0.5, a - 4 * np.sqrt(2 * p),
a + 4 * np.sqrt(2 * p), 'black', lw=4)
d2l.plt.vlines(a - 4 * np.sqrt(2 * p), 0.53, 0.47, 'black', lw=1)
d2l.plt.vlines(a + 4 * np.sqrt(2 * p), 0.53, 0.47, 'black', lw=1)
d2l.plt.title(f'p = {p:.3f}')
d2l.plt.show()
# Plot interval when p > 1/8
plot_chebyshev(0.0, 0.2)_____no_output_____
</code>
The second shows that at $p = 1/8$, the interval exactly touches the two points. This shows that the inequality is *sharp*, since no smaller interval could be taken while keeping the inequality true.
_____no_output_____
<code>
# Plot interval when p = 1/8
plot_chebyshev(0.0, 0.125)_____no_output_____
</code>
The third shows that for $p < 1/8$ the interval only contains the center. This does not invalidate the inequality since we only needed to ensure that no more than $1/4$ of the probability falls outside the interval, which means that once $p < 1/8$, the two points at $a-2$ and $a+2$ can be discarded.
_____no_output_____
<code>
# Plot interval when p < 1/8
plot_chebyshev(0.0, 0.05)_____no_output_____
</code>
### Means and Variances in the Continuum
This has all been in terms of discrete random variables, but the case of continuous random variables is similar. To intuitively understand how this works, imagine that we split the real number line into intervals of length $\epsilon$ given by $(\epsilon i, \epsilon (i+1)]$. Once we do this, our continuous random variable has been made discrete and we can use :eqref:`eq_exp_def` say that
$$
\begin{aligned}
\mu_X & \approx \sum_{i} (\epsilon i)P(X \in (\epsilon i, \epsilon (i+1)]) \\
& \approx \sum_{i} (\epsilon i)p_X(\epsilon i)\epsilon, \\
\end{aligned}
$$
where $p_X$ is the density of $X$. This is an approximation to the integral of $xp_X(x)$, so we can conclude that
$$
\mu_X = \int_{-\infty}^\infty xp_X(x) \; dx.
$$
Similarly, using :eqref:`eq_var_def` the variance can be written as
$$
\sigma^2_X = E[X^2] - \mu_X^2 = \int_{-\infty}^\infty x^2p_X(x) \; dx - \left(\int_{-\infty}^\infty xp_X(x) \; dx\right)^2.
$$
Everything stated above about the mean, the variance, and the standard deviation still applies in this case. For instance, if we consider the random variable with density
$$
p(x) = \begin{cases}
1 & x \in [0,1], \\
0 & \text{otherwise}.
\end{cases}
$$
we can compute
$$
\mu_X = \int_{-\infty}^\infty xp(x) \; dx = \int_0^1 x \; dx = \frac{1}{2}.
$$
and
$$
\sigma_X^2 = \int_{-\infty}^\infty x^2p(x) \; dx - \left(\frac{1}{2}\right)^2 = \frac{1}{3} - \frac{1}{4} = \frac{1}{12}.
$$
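Both integrals can be sanity-checked with the same discretization idea used throughout this section. A plain-NumPy sketch for the uniform density on $[0, 1]$:

<code>
import numpy as np

epsilon = 1e-4
x = np.arange(0, 1, epsilon)
p = np.ones_like(x)  # density of the uniform random variable on [0, 1]

mu = np.sum(x * p) * epsilon
var = np.sum(x**2 * p) * epsilon - mu**2
print(mu, var)  # approximately 1/2 and 1/12
</code>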
As a warning, let us examine one more example, known as the *Cauchy distribution*. This is the distribution with p.d.f. given by
$$
p(x) = \frac{1}{1+x^2}.
$$
_____no_output_____
<code>
# Plot the Cauchy distribution p.d.f.
x = np.arange(-5, 5, 0.01)
p = 1 / (1 + x**2)
d2l.plot(x, p, 'x', 'p.d.f.')_____no_output_____
</code>
This function looks innocent, and indeed consulting a table of integrals will show it has area one under it, and thus it defines a continuous random variable.
To see what goes astray, let us try to compute the variance of this. This would involve using :eqref:`eq_var_def` computing
$$
\int_{-\infty}^\infty \frac{x^2}{1+x^2}\; dx.
$$
The function on the inside looks like this:
_____no_output_____
<code>
# Plot the integrand needed to compute the variance
x = np.arange(-20, 20, 0.01)
p = x**2 / (1 + x**2)
d2l.plot(x, p, 'x', 'integrand')_____no_output_____
</code>
This function clearly has infinite area under it since it is essentially the constant one with a small dip near zero, and indeed we could show that
$$
\int_{-\infty}^\infty \frac{x^2}{1+x^2}\; dx = \infty.
$$
This means it does not have a well-defined finite variance.
However, looking deeper shows an even more disturbing result. Let us try to compute the mean using :eqref:`eq_exp_def`. Using the change of variables formula, we see
$$
\mu_X = \int_{-\infty}^{\infty} \frac{x}{1+x^2} \; dx = \frac{1}{2}\int_1^\infty \frac{1}{u} \; du.
$$
The integral inside is the definition of the logarithm, so this is in essence $\log(\infty) = \infty$, so there is no well-defined average value either!
Machine learning scientists define their models so that we most often do not need to deal with these issues, and will in the vast majority of cases deal with random variables with well-defined means and variances. However, every so often random variables with *heavy tails* (that is those random variables where the probabilities of getting large values are large enough to make things like the mean or variance undefined) are helpful in modeling physical systems, thus it is worth knowing that they exist.
### Joint Density Functions
The above work all assumes we are working with a single real valued random variable. But what if we are dealing with two or more potentially highly correlated random variables? This circumstance is the norm in machine learning: imagine random variables like $R_{i, j}$ which encode the red value of the pixel at the $(i, j)$ coordinate in an image, or $P_t$ which is a random variable given by a stock price at time $t$. Nearby pixels tend to have similar color, and nearby times tend to have similar prices. We cannot treat them as separate random variables, and expect to create a successful model (we will see in :numref:`sec_naive_bayes` a model that under-performs due to such an assumption). We need to develop the mathematical language to handle these correlated continuous random variables.
Thankfully, with the multiple integrals in :numref:`sec_integral_calculus` we can develop such a language. Suppose that we have, for simplicity, two random variables $X, Y$ which can be correlated. Then, similar to the case of a single variable, we can ask the question:
$$
P(X \;\text{is in an}\; \epsilon \text{-sized interval around}\; x \; \text{and} \;Y \;\text{is in an}\; \epsilon \text{-sized interval around}\; y ).
$$
Similar reasoning to the single variable case shows that this should be approximately
$$
P(X \;\text{is in an}\; \epsilon \text{-sized interval around}\; x \; \text{and} \;Y \;\text{is in an}\; \epsilon \text{-sized interval around}\; y ) \approx \epsilon^{2}p(x, y),
$$
for some function $p(x, y)$. This is referred to as the joint density of $X$ and $Y$. Similar properties are true for this as we saw in the single variable case. Namely:
* $p(x, y) \ge 0$;
* $\int _ {\mathbb{R}^2} p(x, y) \;dx \;dy = 1$;
* $P((X, Y) \in \mathcal{D}) = \int _ {\mathcal{D}} p(x, y) \;dx \;dy$.
In this way, we can deal with multiple, potentially correlated random variables. If we wish to work with more than two random variables, we can extend the multivariate density to as many coordinates as desired by considering $p(\mathbf{x}) = p(x_1, \ldots, x_n)$. The same properties of being non-negative, and having total integral of one still hold.
### Marginal Distributions
When dealing with multiple variables, we oftentimes want to be able to ignore the relationships and ask, "how is this one variable distributed?" Such a distribution is called a *marginal distribution*.
To be concrete, let us suppose that we have two random variables $X, Y$ with joint density given by $p _ {X, Y}(x, y)$. We will be using the subscript to indicate what random variables the density is for. The question of finding the marginal distribution is taking this function, and using it to find $p _ X(x)$.
As with most things, it is best to return to the intuitive picture to figure out what should be true. Recall that the density is the function $p _ X$ so that
$$
P(X \in [x, x+\epsilon]) \approx \epsilon \cdot p _ X(x).
$$
There is no mention of $Y$, but if all we are given is $p _{X, Y}$, we need to include $Y$ somehow. We can first observe that this is the same as
$$
P(X \in [x, x+\epsilon] \text{, and } Y \in \mathbb{R}) \approx \epsilon \cdot p _ X(x).
$$
Our density does not directly tell us what happens in this case; we need to split into small intervals in $y$ as well, so we can write this as
$$
\begin{aligned}
\epsilon \cdot p _ X(x) & \approx \sum _ {i} P(X \in [x, x+\epsilon] \text{, and } Y \in [\epsilon \cdot i, \epsilon \cdot (i+1)]) \\
& \approx \sum _ {i} \epsilon^{2} p _ {X, Y}(x, \epsilon\cdot i).
\end{aligned}
$$

:label:`fig_marginal`
This tells us to add up the value of the density along a series of squares in a line as is shown in :numref:`fig_marginal`. Indeed, after canceling one factor of epsilon from both sides, and recognizing the sum on the right is the integral over $y$, we can conclude that
$$
\begin{aligned}
p _ X(x) & \approx \sum _ {i} \epsilon p _ {X, Y}(x, \epsilon\cdot i) \\
& \approx \int_{-\infty}^\infty p_{X, Y}(x, y) \; dy.
\end{aligned}
$$
Thus we see
$$
p _ X(x) = \int_{-\infty}^\infty p_{X, Y}(x, y) \; dy.
$$
This tells us that to get a marginal distribution, we integrate over the variables we do not care about. This process is often referred to as *integrating out* or *marginalizing out* the unneeded variables.
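Numerically, marginalizing is just summing a discretized joint density over the unwanted axis. The sketch below uses a correlated bivariate Gaussian as a stand-in joint density (an illustrative assumption) and recovers the standard Gaussian marginal of $X$:

<code>
import numpy as np

epsilon = 0.05
grid = np.arange(-5, 5, epsilon)
xg, yg = np.meshgrid(grid, grid, indexing='ij')

# Illustrative joint density: a bivariate Gaussian with correlation rho.
rho = 0.6
p_xy = np.exp(-(xg**2 - 2 * rho * xg * yg + yg**2) / (2 * (1 - rho**2))) \
    / (2 * np.pi * np.sqrt(1 - rho**2))

# Integrate out y; the result should match the standard Gaussian density of X.
p_x = p_xy.sum(axis=1) * epsilon
print(np.max(np.abs(p_x - np.exp(-grid**2 / 2) / np.sqrt(2 * np.pi))))
</code>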
### Covariance
When dealing with multiple random variables, there is one additional summary statistic which is helpful to know: the *covariance*. This measures the degree to which two random variables fluctuate together.
Suppose that we have two random variables $X$ and $Y$. To begin with, let us suppose they are discrete, taking on values $(x_i, y_j)$ with probability $p_{ij}$. In this case, the covariance is defined as
$$\sigma_{XY} = \mathrm{Cov}(X, Y) = \sum_{i, j} (x_i - \mu_X) (y_j-\mu_Y) p_{ij} = E[XY] - E[X]E[Y].$$
:eqlabel:`eq_cov_def`
To think about this intuitively: consider the following pair of random variables. Suppose that $X$ takes the values $1$ and $3$, and $Y$ takes the values $-1$ and $3$. Suppose that we have the following probabilities
$$
\begin{aligned}
P(X = 1 \; \text{and} \; Y = -1) & = \frac{p}{2}, \\
P(X = 1 \; \text{and} \; Y = 3) & = \frac{1-p}{2}, \\
P(X = 3 \; \text{and} \; Y = -1) & = \frac{1-p}{2}, \\
P(X = 3 \; \text{and} \; Y = 3) & = \frac{p}{2},
\end{aligned}
$$
where $p$ is a parameter in $[0,1]$ we get to pick. Notice that if $p=1$ then they are both always their minimum or maximum values simultaneously, and if $p=0$ they are guaranteed to take their flipped values simultaneously (one is large when the other is small and vice versa). If $p=1/2$, then the four possibilities are all equally likely, and neither should be related. Let us compute the covariance. First, note $\mu_X = 2$ and $\mu_Y = 1$, so we may compute using :eqref:`eq_cov_def`:
$$
\begin{aligned}
\mathrm{Cov}(X, Y) & = \sum_{i, j} (x_i - \mu_X) (y_j-\mu_Y) p_{ij} \\
& = (1-2)(-1-1)\frac{p}{2} + (1-2)(3-1)\frac{1-p}{2} + (3-2)(-1-1)\frac{1-p}{2} + (3-2)(3-1)\frac{p}{2} \\
& = 4p-2.
\end{aligned}
$$
When $p=1$ (the case where they are both maximally positive or negative at the same time), the covariance is $2$. When $p=0$ (the case where they are flipped), the covariance is $-2$. Finally, when $p=1/2$ (the case where they are unrelated), the covariance is $0$. Thus we see that the covariance measures how these two random variables are related.
A quick note on the covariance is that it only measures these linear relationships. More complex relationships like $X = Y^2$ where $Y$ is randomly chosen from $\{-2, -1, 0, 1, 2\}$ with equal probability can be missed. Indeed a quick computation shows that these random variables have covariance zero, despite one being a deterministic function of the other.
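The quick computation mentioned here can be written out directly by enumerating the five equally likely values of $Y$:

<code>
import numpy as np

y = np.array([-2, -1, 0, 1, 2])
x = y**2
prob = np.full(5, 1 / 5)

cov = np.sum((x - np.sum(x * prob)) * (y - np.sum(y * prob)) * prob)
print(cov)  # 0.0, even though X is a deterministic function of Y
</code>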
For continuous random variables, much the same story holds. At this point, we are pretty comfortable with doing the transition between discrete and continuous, so we will provide the continuous analogue of :eqref:`eq_cov_def` without any derivation.
$$
\sigma_{XY} = \int_{\mathbb{R}^2} (x-\mu_X)(y-\mu_Y)p(x, y) \;dx \;dy.
$$
For visualization, let us take a look at a collection of random variables with tunable covariance.
_____no_output_____
<code>
# Plot a few random variables adjustable covariance
covs = [-0.9, 0.0, 1.2]
d2l.plt.figure(figsize=(12, 3))
for i in range(3):
X = np.random.normal(0, 1, 500)
Y = covs[i]*X + np.random.normal(0, 1, (500))
d2l.plt.subplot(1, 4, i+1)
d2l.plt.scatter(X.asnumpy(), Y.asnumpy())
d2l.plt.xlabel('X')
d2l.plt.ylabel('Y')
d2l.plt.title(f'cov = {covs[i]}')
d2l.plt.show()_____no_output_____
</code>
Let us see some properties of covariances:
* For any random variable $X$, $\mathrm{Cov}(X, X) = \mathrm{Var}(X)$.
* For any random variables $X, Y$ and numbers $a$ and $b$, $\mathrm{Cov}(aX+b, Y) = \mathrm{Cov}(X, aY+b) = a\mathrm{Cov}(X, Y)$.
* If $X$ and $Y$ are independent then $\mathrm{Cov}(X, Y) = 0$.
In addition, we can use the covariance to expand a relationship we saw before. Recall that if $X$ and $Y$ are two independent random variables then
$$
\mathrm{Var}(X+Y) = \mathrm{Var}(X) + \mathrm{Var}(Y).
$$
With knowledge of covariances, we can expand this relationship. Indeed, some algebra can show that in general,
$$
\mathrm{Var}(X+Y) = \mathrm{Var}(X) + \mathrm{Var}(Y) + 2\mathrm{Cov}(X, Y).
$$
This allows us to generalize the variance summation rule for correlated random variables.
### Correlation
As we did in the case of means and variances, let us now consider units. If $X$ is measured in one unit (say inches), and $Y$ is measured in another (say dollars), the covariance is measured in the product of these two units $\text{inches} \times \text{dollars}$. These units can be hard to interpret. What we will often want in this case is a unit-less measurement of relatedness. Indeed, often we do not care about exact quantitative correlation, but rather ask if the correlation is in the same direction, and how strong the relationship is.
To see what makes sense, let us perform a thought experiment. Suppose that we convert our random variables in inches and dollars to be in inches and cents. In this case the random variable $Y$ is multiplied by $100$. If we work through the definition, this means that $\mathrm{Cov}(X, Y)$ will be multiplied by $100$. Thus we see that in this case a change of units changes the covariance by a factor of $100$. Thus, to find our unit-invariant measure of correlation, we will need to divide by something else that also gets scaled by $100$. Indeed we have a clear candidate, the standard deviation! If we define the *correlation coefficient* to be
$$\rho(X, Y) = \frac{\mathrm{Cov}(X, Y)}{\sigma_{X}\sigma_{Y}},$$
:eqlabel:`eq_cor_def`
we see that this is a unit-less value. A little mathematics can show that this number is between $-1$ and $1$ with $1$ meaning maximally positively correlated, whereas $-1$ means maximally negatively correlated.
Returning to our explicit discrete example above, we can see that $\sigma_X = 1$ and $\sigma_Y = 2$, so we can compute the correlation between the two random variables using :eqref:`eq_cor_def` to see that
$$
\rho(X, Y) = \frac{4p-2}{1\cdot 2} = 2p-1.
$$
This now ranges between $-1$ and $1$ with the expected behavior of $1$ meaning maximally positively correlated, and $-1$ meaning maximally negatively correlated.
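The $2p-1$ expression can be verified by sampling from the four-outcome joint distribution above; plain NumPy, with an arbitrary $p$ chosen for illustration:

<code>
import numpy as np

p = 0.8
pairs = np.array([[1, -1], [1, 3], [3, -1], [3, 3]])
probs = np.array([p / 2, (1 - p) / 2, (1 - p) / 2, p / 2])

rng = np.random.default_rng(0)
idx = rng.choice(4, size=200_000, p=probs)
X, Y = pairs[idx, 0], pairs[idx, 1]

print(np.corrcoef(X, Y)[0, 1], 2 * p - 1)  # empirical vs. closed form 2p - 1
</code>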
As another example, consider $X$ as any random variable, and $Y=aX+b$ as any linear deterministic function of $X$. Then, one can compute that
$$\sigma_{Y} = \sigma_{aX+b} = |a|\sigma_{X},$$
$$\mathrm{Cov}(X, Y) = \mathrm{Cov}(X, aX+b) = a\mathrm{Cov}(X, X) = a\mathrm{Var}(X),$$
and thus by :eqref:`eq_cor_def` that
$$
\rho(X, Y) = \frac{a\mathrm{Var}(X)}{|a|\sigma_{X}^2} = \frac{a}{|a|} = \mathrm{sign}(a).
$$
Thus we see that the correlation is $+1$ for any $a > 0$, and $-1$ for any $a < 0$, illustrating that correlation measures the degree and direction in which the two random variables are related, not the scale of the variation.
Let us again plot a collection of random variables with tunable correlation.
_____no_output_____
<code>
# Plot a few random variables adjustable correlations
cors = [-0.9, 0.0, 1.0]
d2l.plt.figure(figsize=(12, 3))
for i in range(3):
X = np.random.normal(0, 1, 500)
Y = cors[i] * X + np.sqrt(1 - cors[i]**2) * np.random.normal(0, 1, 500)
d2l.plt.subplot(1, 4, i + 1)
d2l.plt.scatter(X.asnumpy(), Y.asnumpy())
d2l.plt.xlabel('X')
d2l.plt.ylabel('Y')
d2l.plt.title(f'cor = {cors[i]}')
d2l.plt.show()_____no_output_____
</code>
Let us list a few properties of the correlation below.
* For any random variable $X$, $\rho(X, X) = 1$.
* For any random variables $X, Y$ and numbers $a$ and $b$, $\rho(aX+b, Y) = \rho(X, aY+b) = \rho(X, Y)$.
* If $X$ and $Y$ are independent with non-zero variance then $\rho(X, Y) = 0$.
As a final note, you may feel like some of these formulae are familiar. Indeed, if we expand everything out assuming that $\mu_X = \mu_Y = 0$, we see that this is
$$
\rho(X, Y) = \frac{\sum_{i, j} x_iy_ip_{ij}}{\sqrt{\sum_{i, j}x_i^2 p_{ij}}\sqrt{\sum_{i, j}y_j^2 p_{ij}}}.
$$
This looks like a sum of a product of terms divided by the square root of sums of terms. This is exactly the formula for the cosine of the angle between two vectors $\mathbf{v}, \mathbf{w}$ with the different coordinates weighted by $p_{ij}$:
$$
\cos(\theta) = \frac{\mathbf{v}\cdot \mathbf{w}}{\|\mathbf{v}\|\|\mathbf{w}\|} = \frac{\sum_{i} v_iw_i}{\sqrt{\sum_{i}v_i^2}\sqrt{\sum_{i}w_i^2}}.
$$
Indeed if we think of norms as being related to standard deviations, and correlations as being cosines of angles, much of the intuition we have from geometry can be applied to thinking about random variables.
## Summary
* Continuous random variables are random variables that can take on a continuum of values. They have some technical difficulties that make them more challenging to work with compared to discrete random variables.
* The probability density function allows us to work with continuous random variables by giving a function where the area under the curve on some interval gives the probability of finding a sample point in that interval.
* The cumulative distribution function is the probability of observing the random variable to be less than a given threshold. It can provide a useful alternate viewpoint which unifies discrete and continuous variables.
* The mean is the average value of a random variable.
* The variance is the expected square of the difference between the random variable and its mean.
* The standard deviation is the square root of the variance. It can be thought of as measuring the range of values the random variable may take.
* Chebyshev's inequality allows us to make this intuition rigorous by giving an explicit interval that contains the random variable most of the time.
* Joint densities allow us to work with correlated random variables. We may marginalize joint densities by integrating over unwanted random variables to get the distribution of the desired random variable.
* The covariance and correlation coefficient provide a way to measure any linear relationship between two correlated random variables.
## Exercises
1. Suppose that we have the random variable with density given by $p(x) = \frac{1}{x^2}$ for $x \ge 1$ and $p(x) = 0$ otherwise. What is $P(X > 2)$?
2. The Laplace distribution is a random variable whose density is given by $p(x) = \frac{1}{2}e^{-|x|}$. What are the mean and the standard deviation of this distribution? As a hint, $\int_0^\infty xe^{-x} \; dx = 1$ and $\int_0^\infty x^2e^{-x} \; dx = 2$.
3. I walk up to you on the street and say "I have a random variable with mean $1$, standard deviation $2$, and I observed $25\%$ of my samples taking a value larger than $9$." Do you believe me? Why or why not?
4. Suppose that you have two random variables $X, Y$, with joint density given by $p_{XY}(x, y) = 4xy$ for $x, y \in [0,1]$ and $p_{XY}(x, y) = 0$ otherwise. What is the covariance of $X$ and $Y$?
_____no_output_____[Discussions](https://discuss.d2l.ai/t/415)
_____no_output_____
| {
"repository": "gr8khan/d2lai",
"path": "d2l-en/mxnet/chapter_appendix-mathematics-for-deep-learning/random-variables.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 711699,
"hexsha": "d09762f0ca6d77adcc077205cb2c4d527b99a2bf",
"max_line_length": 812,
"avg_line_length": 65.3234511244,
"alphanum_fraction": 0.5271512254
} |
# Notebook from LungCellAtlas/HLCA_reproducibility
Path: notebooks/1_building_and_annotating_the_atlas_core/11_figure_2_data_overview.ipynb
# HLCA Figure 2_____no_output_____Here we will generate the figures from the HLCA pre-print, figure 2. Figure 2d was generated separately in R, using code from integration benchmarking framework 'scIB'._____no_output_____### import modules, set paths and parameters:_____no_output_____
<code>
import scanpy as sc
import pandas as pd
import numpy as np
import sys
import os
from collections import Counter
sys.path.append("../../scripts/")
import reference_based_harmonizing
import celltype_composition_plotting
import plotting
import sankey
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.colors import to_hex
import ast_____no_output_____sc.set_figure_params(
dpi=140,
fontsize=12,
frameon=False,
transparent=True,
)_____no_output_____sns.set_style(style="white")
sns.set_context(context="paper")_____no_output_____
</code>
for pretty code formatting (not needed to run notebook):_____no_output_____
<code>
%load_ext lab_black_____no_output_____
</code>
paths:_____no_output_____
<code>
path_HLCA = "../../data/HLCA_core_h5ads/HLCA_v1.h5ad"
path_celltype_reference = "../../supporting_files/metadata_harmonization/HLCA_cell_type_reference_mapping_20211103.csv"
dir_figures = "../../figures"_____no_output_____
</code>
## Generate figures:_____no_output_____initiate empty dictionary in which to store paper figures._____no_output_____
<code>
FIGURES = dict()_____no_output_____
</code>
Read in the HLCA core atlas object:_____no_output_____
<code>
adata = sc.read(path_HLCA)_____no_output_____
</code>
#### Overview of stats (number of studies, cells, annotations etc.):_____no_output_____Number of studies, datasets, subjects, samples, cells:_____no_output_____
<code>
print("Number of studies:", len(set(adata.obs.study)))
print("Number of datasets:", len(set(adata.obs.dataset)))
print("Number of subjects:", len(set(adata.obs.subject_ID)))
print("Number of samples:", len(set(adata.obs["sample"])))
print("Number of cells:", adata.obs.shape[0])Number of studies: 11
Number of datasets: 14
Number of subjects: 107
Number of samples: 166
Number of cells: 584884
</code>
Proportions of cell compartments in the HLCA:_____no_output_____
<code>
original_ann_lev_1_percs = np.round(
adata.obs.original_ann_level_1.value_counts() / adata.n_obs * 100, 1
)
print("Original annotation proportions (level 1):")
print(original_ann_lev_1_percs)Original annotation proportions (level 1):
Epithelial 48.1
Immune 38.7
Endothelial 8.5
Stroma 4.3
Proliferating cells 0.3
Name: original_ann_level_1, dtype: float64
</code>
Perc. of cells annotated per level:_____no_output_____
<code>
for level in range(1, 6):
n_unannotated = np.sum(
[
isnone or isnull
for isnone, isnull in zip(
adata.obs[f"original_ann_level_{level}_clean"].values == "None",
pd.isnull(adata.obs[f"original_ann_level_{level}_clean"].values),
)
]
)
n_annotated = adata.n_obs - n_unannotated
print(
f"Perc. originally annotated at level {level}: {round(n_annotated/adata.n_obs*100,1)}"
)Perc. originally annotated at level 1: 100.0
Perc. originally annotated at level 2: 98.8
Perc. originally annotated at level 3: 93.6
Perc. originally annotated at level 4: 65.7
Perc. originally annotated at level 5: 6.8
</code>
Distribution of demographics:_____no_output_____
<code>
print(f"Min. and max. age: {adata.obs.age.min()}, {adata.obs.age.max()}")Min. and max. age: 10.0, 76.0
adata.obs.sex.value_counts() / adata.n_obs * 100_____no_output_____adata.obs.ethnicity.value_counts() / adata.n_obs * 100_____no_output_____print(f"Min. and max. BMI: {adata.obs.BMI.min()}, {adata.obs.BMI.max()}")Min. and max. BMI: 19.9, 48.9
adata.obs.smoking_status.value_counts() / adata.n_obs * 100_____no_output_____
</code>
## figures:_____no_output_____Overview of subjects, samples, and cells per study (not in the paper):_____no_output_____
<code>
plotting.plot_dataset_statistics(adata, fontsize=8, figheightscale=3.5)_____no_output_____
</code>
### 2a Subject/sample distributions_____no_output_____Re-map ethnicities:_____no_output_____
<code>
ethnicity_remapper = {
"asian": "asian",
"black": "black",
"latino": "latino",
"mixed": "mixed",
"nan": "nan",
"pacific islander": "other",
"white": "white",
}_____no_output_____adata.obs.ethnicity = adata.obs.ethnicity.map(ethnicity_remapper)_____no_output_____
</code>
Plot subject demographic and sample anatomical location distributions:_____no_output_____
<code>
FIGURES["2a_subject_and_sample_stats"] = plotting.plot_subject_and_sample_stats_incl_na(
adata, return_fig=True
)age: 99% annotated
BMI: 70% annotated
sex: 100% annotated)
ethnicity 93% annotated
smoking_status: 92% annotated
</code>
## 2b Cell type composition sankey plot, level 1-3:_____no_output_____First, generate a color mapping. We want to map cell types from the same compartment in the same shade (e.g. epithelial orange/red, endothelial purple), at all levels. We'll need to incorporate our hierarchical cell type reference for that, and then calculate the colors per level. That is done with the code below:_____no_output_____
<code>
harmonizing_df = reference_based_harmonizing.load_harmonizing_table(
path_celltype_reference
)
consensus_df = reference_based_harmonizing.create_consensus_table(harmonizing_df)
max_level = 5
color_prop_df = celltype_composition_plotting.calculate_hierarchical_coloring_df(
adata,
consensus_df,
max_level,
lev1_colormap_dict={
"Epithelial": "Oranges",
"Immune": "Greens",
"Endothelial": "Purples",
"Stroma": "Blues",
"Proliferating cells": "Reds",
},
ann_level_name_prefix="original_ann_level_",
)/home/icb/lisa.sikkema/miniconda3/envs/scRNAseq_analysis/lib/python3.7/site-packages/pandas/core/indexing.py:671: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
self._setitem_with_indexer(indexer, value)
</code>
Set minimum percentage among plotted cells for a cell type to be included. This prevents the plot from becoming overcrowded with labels and including lines that are too thin to even see:_____no_output_____
<code>
min_ct_perc = 0.02_____no_output_____
</code>
Now generate the two sankey plots. _____no_output_____
<code>
fig, ax = plt.subplots(figsize=(8, 8))
cts_ordered_left_lev1 = [
ct
for ct in color_prop_df.l1_label
if ct in adata.obs.original_ann_level_1_clean.values
]
ct_to_color_lev1 = {
ct: col for ct, col in zip(color_prop_df.l1_label, color_prop_df.l1_rgba)
}
# get level 1 anns:
y_lev1 = adata.obs.original_ann_level_1_clean
lev1_percs = {ct: n / len(y_lev1) * 100 for ct, n in Counter(y_lev1).items()}
lev1_ct_to_keep = [ct for ct, perc in lev1_percs.items() if perc > min_ct_perc]
# get level 1 anns, set "None" in level 2 compartment specific,
# remove cell types that make up less than min_ct_perc of cells plotted
y_lev2 = adata.obs.original_ann_level_2_clean.cat.remove_unused_categories()
y_lev2 = [
f"{ct} ({lev1ann})" if ct == "None" else ct
for ct, lev1ann in zip(y_lev2, adata.obs.original_ann_level_1_clean)
]
lev2_percs = {ct: n / len(y_lev2) * 100 for ct, n in Counter(y_lev2).items()}
lev2_ct_to_keep = [ct for ct, perc in lev2_percs.items() if perc > min_ct_perc]
# plot sankey
sankey.sankey(
x=[
lev1
for lev1, lev2 in zip(y_lev1, list(y_lev2))
if lev1 in lev1_ct_to_keep and lev2 in lev2_ct_to_keep
],
y=[
lev2
for lev1, lev2 in zip(y_lev1, list(y_lev2))
if lev1 in lev1_ct_to_keep and lev2 in lev2_ct_to_keep
],
title="Hierarchical cell type annotation",
title_left="Level 1",
title_right="Level 2",
ax=ax,
fontsize="x-small",
left_order=cts_ordered_left_lev1,
colors={
ct: to_hex(ast.literal_eval(ct_to_color_lev1[ct]))
for ct in cts_ordered_left_lev1
},
alpha=0.8,
)
plt.show()
plt.close()
FIGURES["2b_sankey_1_2"] = fig_____no_output_____fig, ax = plt.subplots(figsize=(8, 8))
# use order from earlier sankey plot
cts_ordered_left_lev2 = [
ct
for ct in [
"Airway epithelium",
"Alveolar epithelium",
"Submucosal Gland",
"None (Epithelial)",
"Myeloid",
"Lymphoid",
"Megakaryocytic and erythroid",
"Granulocytes",
"Blood vessels",
"Lymphatic EC",
"None (Endothelial)",
"Fibroblast lineage",
"Smooth muscle",
"None (Stroma)",
"Mesothelium",
"None (Proliferating cells)",
]
if ct in lev2_ct_to_keep
]
# ct for ct in color_prop_df.l2_label if ct in adata.obs.ann_level_2_clean.values
# ]
ct_to_color_lev2 = {
ct: col for ct, col in zip(color_prop_df.l2_label, color_prop_df.l2_rgba)
}
# manually look up colors for "None" cell type annotations:
for none_ct in "Epithelial", "Endothelial", "Stroma", "Proliferating cells":
ct_to_color_lev2[f"None ({none_ct})"] = color_prop_df.loc[
color_prop_df.l1_label == none_ct, "l1_rgba"
].values[0]
y_lev3 = adata.obs.original_ann_level_3_clean
y_lev3 = [
f"{ct} ({lev1ann})" if ct.startswith("None") else ct
for ct, lev1ann in zip(y_lev3, adata.obs.original_ann_level_1_clean)
]
lev3_percs = {ct: n / len(y_lev3) * 100 for ct, n in Counter(y_lev3).items()}
lev3_ct_to_keep = [ct for ct, perc in lev3_percs.items() if perc > min_ct_perc]
sankey.sankey(
x=[
lev2
for lev2, lev3 in zip(y_lev2, list(y_lev3))
if lev2 in lev2_ct_to_keep and lev3 in lev3_ct_to_keep
],
y=[
lev3
for lev2, lev3 in zip(y_lev2, list(y_lev3))
if lev2 in lev2_ct_to_keep and lev3 in lev3_ct_to_keep
],
title="Hierarchical cell type annotation",
title_left="Level 2",
title_right="Level 3",
ax=ax,
fontsize=5, # "xx-small",
left_order=cts_ordered_left_lev2,
colors={
ct: to_hex(ast.literal_eval(ct_to_color_lev2[ct]))
for ct in cts_ordered_left_lev2
},
alpha=0.8,
)
plt.show()
plt.close()
FIGURES["2b_sankey_2_3"] = fig_____no_output_____
</code>
### 2c Sample compositions:_____no_output_____In the paper we use ann level 2 and group by sample:_____no_output_____
<code>
ann_level_number = "2"
grouping_covariate = "sample" # choose e.g. "dataset" or "subject_ID" or "sample"_____no_output_____
</code>
Use the "clean" version, i.e. without forward-propagated labels for cells not annotated at the chosen level, but leaving those cells set to "None":_____no_output_____
<code>
if ann_level_number == "1":
ann_level = "original_ann_level_" + ann_level_number
else:
ann_level = "original_ann_level_" + ann_level_number + "_clean"_____no_output_____
</code>
Now plot:_____no_output_____
<code>
FIGURES[
"2c_sample_compositions"
] = celltype_composition_plotting.plot_celltype_composition_per_sample(
adata,
ann_level_number,
color_prop_df,
return_fig=True,
title="original cell type annotations (level 2) per sample",
ann_level_name_prefix="original_ann_level_",
)_____no_output_____
</code>
# Store figures_____no_output_____
<code>
# for figname, fig in FIGURES.items():
# print("Saving", figname)
# fig.savefig(os.path.join(dir_figures, f"{figname}.png"), bbox_inches="tight", dpi=140)_____no_output_____
</code>
| {
"repository": "LungCellAtlas/HLCA_reproducibility",
"path": "notebooks/1_building_and_annotating_the_atlas_core/11_figure_2_data_overview.ipynb",
"matched_keywords": [
"Scanpy"
],
"stars": null,
"size": 665668,
"hexsha": "d098ee030200b90f2aaa88016281e1f999576fdd",
"max_line_length": 207516,
"avg_line_length": 777.6495327103,
"alphanum_fraction": 0.9514728063
} |
# Notebook from STScI-MIRI/TSO-MIRI-simulations
Path: TSO-imaging-sims/datalabs-sim/MIRI_im_tso_datalabs.ipynb
<code>
%load_ext autoreload
%autoreload_____no_output_____import numpy as np
import matplotlib.pyplot as plt
import os
import glob
from mirisim.config_parser import SimulatorConfig
from mirisim import MiriSimulation
import tso_img_datalabs_sim
from tso_img_datalabs_sim import wasp103_scene, wasp103_sim_config
from importlib import reload_____no_output_____
</code>
In this notebook I'm going to generate simulated MIRI time-series imaging data, to provide as a test set for ESA Datalabs. To install MIRISim, see [the public release webpage](http://miri.ster.kuleuven.be/bin/view/Public/MIRISim_Public). The target for the mock observations is WASP-103, an exoplanet host star with the following properties from [the exoplanet encyclopaedia](http://exoplanet.eu/catalog/wasp-103_b/):
* spectral type F8V
* T_bb = 6110 K
* V = 12.0, K = 10.7
A K magnitude of 10.7 corresponds to a flux of 32.5 mJy, or 32.5e3 microJy (a quick sanity check of this conversion is sketched below, after this cell).
Using the ETC, I calculated the following number of groups for a high-SNR but unsaturated image:
* FULL array: NGROUPS = 5
* SUB64 subarray: NGROUPS = 60
We want to simulate a medium-length exposure in both the FULL and SUB64 subarrays. In total that's 2 simulations.
| Sim no | Array | NGroups | NInt | NExp | Exp time |
| -------|---------| ---------|--------|--------|----------|
|1 |FULL | 5 | 200 | 1 | 0.77 hr |
|2 |SUB64 | 60 | 600 | 1 | 0.85 hr |
### Steps in setting up the simulation
This notebook will go through the following steps:
* Create the scene
* Set up the simulation
* Run the simulation
Each step has its own function. Steps 1 and 2 will each write out a .ini file, which will be used as input for the final step._____no_output_____
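As a quick sanity check of the K-band magnitude-to-flux conversion quoted above, here is a minimal sketch. The Vega-system K-band zero point of ~650 Jy is an assumption (the exact value depends on the adopted filter and photometric system), not a number taken from this notebook:
<code>
# Assumed Vega-system K-band zero point in Jy (exact value depends on the filter definition)
F0_K_JY = 650.0

k_mag = 10.7
flux_jy = F0_K_JY * 10 ** (-k_mag / 2.5)
print(f"K = {k_mag} -> {flux_jy * 1e3:.1f} mJy")  # ~34 mJy, in the same ballpark as the 32.5 mJy used above
</code>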
<code>
arr = ['FULL', 'SUB64']
ngrp = [5, 60]
#nints = [200, 600]
nints = [1, 1]_____no_output_____
</code>
## Step 1: Creating the input scene (WASP-103)
Here we'll create the input scene for the simulations using the function wasp103_scene(). Arguments:
* scene_file: the filename for the .ini file
* write_cube: write the scene image out to a FITS file (optional; default=False)
The function returns a mirisim.skysim.scenes.CompositeSkyScene object.
_____no_output_____
<code>
scene_ini = wasp103_scene(scene_file='wasp103_scene.ini', write_cube=False)2021-02-24 14:04:56,692 - INFO - Initializing Point
2021-02-24 14:04:56,693 - INFO - Initializing Background
print(scene_ini)wasp103_scene.ini
</code>
## Step 2: Configuring the simulation
Now I'll set up the simulations and prepare to run them. I'll set it up to loop through the 2 simulations. For this I wrote the function wasp103_sim_config. Check the docstring for descriptions and default values of the arguments.
The function will write out another .ini file containing the simulation configuration, and it returns the output filename for further use._____no_output_____
<code>
#reload(tso_img_sims_setup)
#from tso_img_sims_setup import wasp103_sim_config
for (a, g, i) in zip(arr, ngrp, nints):
sim_ini = wasp103_sim_config(mode='imaging', arr=a, ngrp=g, nint=i, nexp=1, filt='F770W',
scene_file=scene_ini, out=True)
print(sim_ini)Found scene file wasp103_scene.ini
wasp103_FULL_5G1I1E_simconfig.ini exists, overwrite (y/[n])?y
wasp103_FULL_5G1I1E_simconfig.ini
Found scene file wasp103_scene.ini
wasp103_SUB64_60G1I1E_simconfig.ini exists, overwrite (y/[n])?y
wasp103_SUB64_60G1I1E_simconfig.ini
</code>
### Step 3: Run the simulation
In the following step we'll run the simulations for the 2 different cases. For each run, we need 3 input files: the scene, the simulation configuration, and the simulator setup file. The first and last of these remain the same for each run, and we loop through the list of 2 simulation config files.
After the simulation has run, the code renames the output directory to include the simulation settings in the directory name.
_____no_output_____
<code>
cfg_files = glob.glob('*_simconfig.ini')
print(cfg_files)
['wasp103_FULL_5G1I1E_simconfig.ini', 'wasp103_SUB64_60G1I1E_simconfig.ini']
# configure the simulator engine - this requires no editing from the default
simulator_config = SimulatorConfig.from_default()
for f in cfg_files[:1]:
tmp = f.split('.')
fcomps = tmp[0].split('_')
sim = MiriSimulation.from_configfiles(f)
sim.run()
outdir = sorted(glob.glob('*_*_mirisim'), key=os.path.getmtime )[-1]
new_outdir = 'wasp103_imtso_{0}_{1}_{2}'.format(fcomps[1], fcomps[2], outdir)
os.rename(outdir, new_outdir)
print(outdir, new_outdir)
2021-02-24 14:10:04,456 - INFO - Using simulation configuration: wasp103_FULL_5G1I1E_simconfig.ini
2021-02-24 14:10:04,458 - INFO - Using scene configuration: wasp103_scene.ini
2021-02-24 14:10:04,460 - INFO - MIRISim version: 2.3.0
2021-02-24 14:10:04,461 - INFO - MIRI Simulation started.
2021-02-24 14:10:04,463 - INFO - Output will be saved to: 20210224_141004_mirisim
2021-02-24 14:10:04,464 - INFO - Storing configs in output directory.
2021-02-24 14:10:04,467 - INFO - Storing dither pattern in output directory.
2021-02-24 14:10:04,468 - INFO - Using $CDP_DIR for location of CDP files: /Users/kendrew//CDP_2.3
2021-02-24 14:10:04,469 - INFO - Setting up simulated Observation, with following settings:
2021-02-24 14:10:04,470 - INFO - Configuration Path: IMA_FULL
2021-02-24 14:10:04,471 - INFO - Primary optical path: IMA
2021-02-24 14:10:04,472 - INFO - IMA Filter: F770W
2021-02-24 14:10:04,473 - INFO - IMA Subarray: FULL
2021-02-24 14:10:04,474 - INFO - IMA detector readout mode: FAST
2021-02-24 14:10:04,475 - INFO - IMA detector # exposures: 1
2021-02-24 14:10:04,476 - INFO - IMA detector # integrations: 1
2021-02-24 14:10:04,477 - INFO - IMA detector # frames: 5
2021-02-24 14:10:04,478 - INFO - Parsing: Background
2021-02-24 14:10:04,479 - INFO - Initializing Background
2021-02-24 14:10:04,480 - INFO - Parsing: point_1
2021-02-24 14:10:04,481 - INFO - Initializing Point
2021-02-24 14:10:04,481 - INFO - Simulating a single pointing.
2021-02-24 14:10:04,482 - WARNING - Matching against local CDP cache only.
2021-02-24 14:10:04,483 - ERROR - The criteria given (DISTORTION, detector=MIRIMAGE) did not match any CDP files.
2021-02-24 14:10:04,484 - ERROR - No data model could be retrieved.
</code>
### Step 4: Minor housekeeping to make the sim pipeline-ready
To make the MIRISim data ready for the TSO-specific pipeline, we have to make a couple of small changes to the data:
* add TSOVISIT = TRUE to the primary header (see the sketch after this list)
* make sure the _____no_output_____
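For the first of these changes, here is a minimal sketch using astropy; the glob pattern for the simulated exposure files is hypothetical and should be adjusted to the actual MIRISim output layout:
<code>
from astropy.io import fits
import glob

# Hypothetical path pattern for the simulated exposures; adjust to the real MIRISim output layout.
for fname in glob.glob('wasp103_imtso_*/det_images/*.fits'):
    with fits.open(fname, mode='update') as hdul:
        # Mark the exposure as a time-series observation so TSO-specific pipeline steps are triggered.
        hdul[0].header['TSOVISIT'] = True
</code>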
| {
"repository": "STScI-MIRI/TSO-MIRI-simulations",
"path": "TSO-imaging-sims/datalabs-sim/MIRI_im_tso_datalabs.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 21629,
"hexsha": "d099ec5708875e294d3b64b8bbb26f320e6b358b",
"max_line_length": 2069,
"avg_line_length": 69.3237179487,
"alphanum_fraction": 0.6629987517
} |
# Notebook from psnx/artificial-intelligence
Path: Exercises/4_Bayesian_Interference/Bayesian_Inference.ipynb
## Our Mission ##
Spam detection is one of the major applications of Machine Learning in the interwebs today. Pretty much all of the major email service providers have spam detection systems built in and automatically classify such mail as 'Junk Mail'.
In this mission we will be using the Naive Bayes algorithm to create a model that can classify SMS messages as spam or not spam, based on the training we give to the model. It is important to have some level of intuition as to what a spammy text message might look like. Often they have words like 'free', 'win', 'winner', 'cash', 'prize' and the like in them as these texts are designed to catch your eye and in some sense tempt you to open them. Also, spam messages tend to have words written in all capitals and also tend to use a lot of exclamation marks. To the human recipient, it is usually pretty straightforward to identify a spam text and our objective here is to train a model to do that for us!
Being able to identify spam messages is a binary classification problem as messages are classified as either 'Spam' or 'Not Spam' and nothing else. Also, this is a supervised learning problem, as we will be feeding a labelled dataset into the model, that it can learn from, to make future predictions.
# Overview
This project has been broken down into the following steps:
- Step 0: Introduction to the Naive Bayes Theorem
- Step 1.1: Understanding our dataset
- Step 1.2: Data Preprocessing
- Step 2.1: Bag of Words (BoW)
- Step 2.2: Implementing BoW from scratch
- Step 2.3: Implementing Bag of Words in scikit-learn
- Step 3.1: Training and testing sets
- Step 3.2: Applying Bag of Words processing to our dataset.
- Step 4.1: Bayes Theorem implementation from scratch
- Step 4.2: Naive Bayes implementation from scratch
- Step 5: Naive Bayes implementation using scikit-learn
- Step 6: Evaluating our model
- Step 7: Conclusion
**Note**: If you need help with a step, you can find the solution notebook by clicking on the Jupyter logo in the top left of the notebook._____no_output_____### Step 0: Introduction to the Naive Bayes Theorem ###
Bayes Theorem is one of the earliest probabilistic inference algorithms. It was developed by Reverend Bayes (who used it to try and infer the existence of God, no less), and it still performs extremely well for certain use cases.
It's best to understand this theorem using an example. Let's say you are a member of the Secret Service and you have been deployed to protect the Democratic presidential nominee during one of his/her campaign speeches. Being a public event that is open to all, your job is not easy and you have to be on the constant lookout for threats. So one place to start is to put a certain threat-factor for each person. So based on the features of an individual, like age, whether the person is carrying a bag, looks nervous, etc., you can make a judgment call as to whether that person is a viable threat.
If an individual ticks all the boxes up to a level where it crosses a threshold of doubt in your mind, you can take action and remove that person from the vicinity. Bayes Theorem works in the same way, as we are computing the probability of an event (a person being a threat) based on the probabilities of certain related events (age, presence of bag or not, nervousness of the person, etc.).
One thing to consider is the independence of these features amongst each other. For example if a child looks nervous at the event then the likelihood of that person being a threat is not as much as say if it was a grown man who was nervous. To break this down a bit further, here there are two features we are considering, age AND nervousness. Say we look at these features individually, we could design a model that flags ALL persons that are nervous as potential threats. However, it is likely that we will have a lot of false positives as there is a strong chance that minors present at the event will be nervous. Hence by considering the age of a person along with the 'nervousness' feature we would definitely get a more accurate result as to who are potential threats and who aren't.
This is the 'Naive' bit of the theorem where it considers each feature to be independent of each other which may not always be the case and hence that can affect the final judgement.
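To make this concrete before we look at the data, here is a minimal sketch of Bayes' rule applied to the spam setting. The probabilities below are made-up, purely illustrative numbers; the real values will be estimated from the dataset later in the mission:
<code>
# P(spam | word) = P(word | spam) * P(spam) / P(word)
p_spam = 0.13             # prior probability that a message is spam (illustrative)
p_word_given_spam = 0.40  # probability that a spam message contains the word 'free' (illustrative)
p_word_given_ham = 0.05   # probability that a non-spam message contains 'free' (illustrative)

# total probability of seeing the word in any message
p_word = p_word_given_spam * p_spam + p_word_given_ham * (1 - p_spam)

# posterior probability that a message containing 'free' is spam
p_spam_given_word = p_word_given_spam * p_spam / p_word
print(round(p_spam_given_word, 2))  # roughly 0.54
</code>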
In short, Bayes Theorem calculates the probability of a certain event happening (in our case, a message being spam) based on the joint probabilistic distributions of certain other events (in our case, the appearance of certain words in a message). We will dive into the workings of Bayes Theorem later in the mission, but first, let us understand the data we are going to work with._____no_output_____### Step 1.1: Understanding our dataset ###
We will be using a dataset originally compiled and posted on the UCI Machine Learning repository which has a very good collection of datasets for experimental research purposes. If you're interested, you can review the [abstract](https://archive.ics.uci.edu/ml/datasets/SMS+Spam+Collection) and the original [compressed data file](https://archive.ics.uci.edu/ml/machine-learning-databases/00228/) on the UCI site. For this exercise, however, we've gone ahead and downloaded the data for you.
**Here's a preview of the data:**
<img src="images/dqnb.png" height="1242" width="1242">
The columns in the data set are currently not named and as you can see, there are 2 columns.
The first column takes two values, 'ham' which signifies that the message is not spam, and 'spam' which signifies that the message is spam.
The second column is the text content of the SMS message that is being classified._____no_output_____>**Instructions:**
* Import the dataset into a pandas dataframe using the **read_table** method. The file has already been downloaded, and you can access it using the filepath 'smsspamcollection/SMSSpamCollection'. Because this is a tab separated dataset we will be using '\\t' as the value for the 'sep' argument which specifies this format.
* Also, rename the column names by specifying a list ['label', 'sms_message'] to the 'names' argument of read_table().
* Print the first five values of the dataframe with the new column names._____no_output_____
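A minimal sketch of what the loading step described above could look like, assuming pandas is installed:
<code>
import pandas as pd

# Read the tab-separated dataset and name the two columns as described above
df = pd.read_table('smsspamcollection/SMSSpamCollection',
                   sep='\t',
                   names=['label', 'sms_message'])

# Print the first five rows with the new column names
print(df.head())
</code>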
<code>
# '!' allows you to run bash commands from jupyter notebook.
print("List all the files in the current directory\n")
!ls
# The required data table can be found under smsspamcollection/SMSSpamCollection
print("\n List all the files inside the smsspamcollection directory\n")
!ls smsspamcollectionList all the files in the current directory
Bayesian_Inference.ipynb images
Bayesian_Inference_solution.ipynb smsspamcollection
List all the files inside the smsspamcollection directory
readme SMSSpamCollection
!cat smsspamcollection/SMSSpamCollectionham Go until jurong point, crazy.. Available only in bugis n great world la e buffet... Cine there got amore wat...
ham Ok lar... Joking wif u oni...
spam Free entry in 2 a wkly comp to win FA Cup final tkts 21st May 2005. Text FA to 87121 to receive entry question(std txt rate)T&C's apply 08452810075over18's
ham U dun say so early hor... U c already then say...
ham Nah I don't think he goes to usf, he lives around here though
spam FreeMsg Hey there darling it's been 3 week's now and no word back! I'd like some fun you up for it still? Tb ok! XxX std chgs to send, £1.50 to rcv
ham Even my brother is not like to speak with me. They treat me like aids patent.
ham As per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune
spam WINNER!! As a valued network customer you have been selected to receivea £900 prize reward! To claim call 09061701461. Claim code KL341. Valid 12 hours only.
spam Had your mobile 11 months or more? U R entitled to Update to the latest colour mobiles with camera for Free! Call The Mobile Update Co FREE on 08002986030
ham I'm gonna be home soon and i don't want to talk about this stuff anymore tonight, k? I've cried enough today.
spam SIX chances to win CASH! From 100 to 20,000 pounds txt> CSH11 and send to 87575. Cost 150p/day, 6days, 16+ TsandCs apply Reply HL 4 info
spam URGENT! You have won a 1 week FREE membership in our £100,000 Prize Jackpot! Txt the word: CLAIM to No: 81010 T&C www.dbuk.net LCCLTD POBOX 4403LDNW1A7RW18
ham I've been searching for the right words to thank you for this breather. I promise i wont take your help for granted and will fulfil my promise. You have been wonderful and a blessing at all times.
ham I HAVE A DATE ON SUNDAY WITH WILL!!
spam XXXMobileMovieClub: To use your credit, click the WAP link in the next txt message or click here>> http://wap. xxxmobilemovieclub.com?n=QJKGIGHJJGCBL
ham Oh k...i'm watching here:)
ham Eh u remember how 2 spell his name... Yes i did. He v naughty make until i v wet.
ham Fine if thats the way u feel. Thats the way its gota b
spam England v Macedonia - dont miss the goals/team news. Txt ur national team to 87077 eg ENGLAND to 87077 Try:WALES, SCOTLAND 4txt/ú1.20 POBOXox36504W45WQ 16+
ham Is that seriously how you spell his name?
ham I‘m going to try for 2 months ha ha only joking
ham So ü pay first lar... Then when is da stock comin...
ham Aft i finish my lunch then i go str down lor. Ard 3 smth lor. U finish ur lunch already?
ham Ffffffffff. Alright no way I can meet up with you sooner?
ham Just forced myself to eat a slice. I'm really not hungry tho. This sucks. Mark is getting worried. He knows I'm sick when I turn down pizza. Lol
ham Lol your always so convincing.
ham Did you catch the bus ? Are you frying an egg ? Did you make a tea? Are you eating your mom's left over dinner ? Do you feel my Love ?
ham I'm back & we're packing the car now, I'll let you know if there's room
ham Ahhh. Work. I vaguely remember that! What does it feel like? Lol
ham Wait that's still not all that clear, were you not sure about me being sarcastic or that that's why x doesn't want to live with us
ham Yeah he got in at 2 and was v apologetic. n had fallen out and she was actin like spoilt child and he got caught up in that. Till 2! But we won't go there! Not doing too badly cheers. You?
ham K tell me anything about you.
ham For fear of fainting with the of all that housework you just did? Quick have a cuppa
spam Thanks for your subscription to Ringtone UK your mobile will be charged £5/month Please confirm by replying YES or NO. If you reply NO you will not be charged
ham Yup... Ok i go home look at the timings then i msg ü again... Xuhui going to learn on 2nd may too but her lesson is at 8am
ham Oops, I'll let you know when my roommate's done
ham I see the letter B on my car
ham Anything lor... U decide...
ham Hello! How's you and how did saturday go? I was just texting to see if you'd decided to do anything tomo. Not that i'm trying to invite myself or anything!
ham Pls go ahead with watts. I just wanted to be sure. Do have a great weekend. Abiola
ham Did I forget to tell you ? I want you , I need you, I crave you ... But most of all ... I love you my sweet Arabian steed ... Mmmmmm ... Yummy
spam 07732584351 - Rodger Burns - MSG = We tried to call you re your reply to our sms for a free nokia mobile + free camcorder. Please call now 08000930705 for delivery tomorrow
ham WHO ARE YOU SEEING?
ham Great! I hope you like your man well endowed. I am <#> inches...
ham No calls..messages..missed calls
ham Didn't you get hep b immunisation in nigeria.
ham Fair enough, anything going on?
ham Yeah hopefully, if tyler can't do it I could maybe ask around a bit
ham U don't know how stubborn I am. I didn't even want to go to the hospital. I kept telling Mark I'm not a weak sucker. Hospitals are for weak suckers.
ham What you thinked about me. First time you saw me in class.
ham A gram usually runs like <#> , a half eighth is smarter though and gets you almost a whole second gram for <#>
ham K fyi x has a ride early tomorrow morning but he's crashing at our place tonight
ham Wow. I never realized that you were so embarassed by your accomodations. I thought you liked it, since i was doing the best i could and you always seemed so happy about "the cave". I'm sorry I didn't and don't have more to give. I'm sorry i offered. I'm sorry your room was so embarassing.
spam SMS. ac Sptv: The New Jersey Devils and the Detroit Red Wings play Ice Hockey. Correct or Incorrect? End? Reply END SPTV
ham Do you know what Mallika Sherawat did yesterday? Find out now @ <URL>
spam Congrats! 1 year special cinema pass for 2 is yours. call 09061209465 now! C Suprman V, Matrix3, StarWars3, etc all 4 FREE! bx420-ip4-5we. 150pm. Dont miss out!
ham Sorry, I'll call later in meeting.
ham Tell where you reached
ham Yes..gauti and sehwag out of odi series.
ham Your gonna have to pick up a $1 burger for yourself on your way home. I can't even move. Pain is killing me.
ham Ha ha ha good joke. Girls are situation seekers.
ham Its a part of checking IQ
ham Sorry my roommates took forever, it ok if I come by now?
ham Ok lar i double check wif da hair dresser already he said wun cut v short. He said will cut until i look nice.
spam As a valued customer, I am pleased to advise you that following recent review of your Mob No. you are awarded with a £1500 Bonus Prize, call 09066364589
ham Today is "song dedicated day.." Which song will u dedicate for me? Send this to all ur valuable frnds but first rply me...
spam Urgent UR awarded a complimentary trip to EuroDisinc Trav, Aco&Entry41 Or £1000. To claim txt DIS to 87121 18+6*£1.50(moreFrmMob. ShrAcomOrSglSuplt)10, LS1 3AJ
spam Did you hear about the new "Divorce Barbie"? It comes with all of Ken's stuff!
ham I plane to give on this month end.
ham Wah lucky man... Then can save money... Hee...
ham Finished class where are you.
ham HI BABE IM AT HOME NOW WANNA DO SOMETHING? XX
ham K..k:)where are you?how did you performed?
ham U can call me now...
ham I am waiting machan. Call me once you free.
ham Thats cool. i am a gentleman and will treat you with dignity and respect.
ham I like you peoples very much:) but am very shy pa.
ham Does not operate after <#> or what
ham Its not the same here. Still looking for a job. How much do Ta's earn there.
ham Sorry, I'll call later
ham K. Did you call me just now ah?
ham Ok i am on the way to home hi hi
ham You will be in the place of that man
ham Yup next stop.
ham I call you later, don't have network. If urgnt, sms me.
ham For real when u getting on yo? I only need 2 more tickets and one more jacket and I'm done. I already used all my multis.
ham Yes I started to send requests to make it but pain came back so I'm back in bed. Double coins at the factory too. I gotta cash in all my nitros.
ham I'm really not up to it still tonight babe
ham Ela kano.,il download, come wen ur free..
ham Yeah do! Don‘t stand to close tho- you‘ll catch something!
ham Sorry to be a pain. Is it ok if we meet another night? I spent late afternoon in casualty and that means i haven't done any of y stuff42moro and that includes all my time sheets and that. Sorry.
ham Smile in Pleasure Smile in Pain Smile when trouble pours like Rain Smile when sum1 Hurts U Smile becoz SOMEONE still Loves to see u Smiling!!
spam Please call our customer service representative on 0800 169 6031 between 10am-9pm as you have WON a guaranteed £1000 cash or £5000 prize!
ham Havent planning to buy later. I check already lido only got 530 show in e afternoon. U finish work already?
spam Your free ringtone is waiting to be collected. Simply text the password "MIX" to 85069 to verify. Get Usher and Britney. FML, PO Box 5249, MK17 92H. 450Ppw 16
ham Watching telugu movie..wat abt u?
ham i see. When we finish we have loads of loans to pay
ham Hi. Wk been ok - on hols now! Yes on for a bit of a run. Forgot that i have hairdressers appointment at four so need to get home n shower beforehand. Does that cause prob for u?"
ham I see a cup of coffee animation
ham Please don't text me anymore. I have nothing else to say.
ham Okay name ur price as long as its legal! Wen can I pick them up? Y u ave x ams xx
ham I'm still looking for a car to buy. And have not gone 4the driving test yet.
ham As per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune
ham wow. You're right! I didn't mean to do that. I guess once i gave up on boston men and changed my search location to nyc, something changed. Cuz on my signin page it still says boston.
ham Umma my life and vava umma love you lot dear
ham Thanks a lot for your wishes on my birthday. Thanks you for making my birthday truly memorable.
ham Aight, I'll hit you up when I get some cash
ham How would my ip address test that considering my computer isn't a minecraft server
ham I know! Grumpy old people. My mom was like you better not be lying. Then again I am always the one to play jokes...
ham Dont worry. I guess he's busy.
ham What is the plural of the noun research?
ham Going for dinner.msg you after.
ham I'm ok wif it cos i like 2 try new things. But i scared u dun like mah. Cos u said not too loud.
spam GENT! We are trying to contact you. Last weekends draw shows that you won a £1000 prize GUARANTEED. Call 09064012160. Claim Code K52. Valid 12hrs only. 150ppm
ham Wa, ur openin sentence very formal... Anyway, i'm fine too, juz tt i'm eatin too much n puttin on weight...Haha... So anythin special happened?
ham As I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there.
spam You are a winner U have been specially selected 2 receive £1000 or a 4* holiday (flights inc) speak to a live operator 2 claim 0871277810910p/min (18+)
ham Goodo! Yes we must speak friday - egg-potato ratio for tortilla needed!
ham Hmm...my uncle just informed me that he's paying the school directly. So pls buy food.
spam PRIVATE! Your 2004 Account Statement for 07742676969 shows 786 unredeemed Bonus Points. To claim call 08719180248 Identifier Code: 45239 Expires
spam URGENT! Your Mobile No. was awarded £2000 Bonus Caller Prize on 5/9/03 This is our final try to contact U! Call from Landline 09064019788 BOX42WR29C, 150PPM
ham here is my new address -apples&pairs&all that malarky
spam Todays Voda numbers ending 7548 are selected to receive a $350 award. If you have a match please call 08712300220 quoting claim code 4041 standard rates app
ham I am going to sao mu today. Will be done only at 12
ham Ü predict wat time ü'll finish buying?
ham Good stuff, will do.
ham Just so that you know,yetunde hasn't sent money yet. I just sent her a text not to bother sending. So its over, you dont have to involve yourself in anything. I shouldn't have imposed anything on you in the first place so for that, i apologise.
ham Are you there in room.
ham HEY GIRL. HOW R U? HOPE U R WELL ME AN DEL R BAK! AGAIN LONG TIME NO C! GIVE ME A CALL SUM TIME FROM LUCYxx
ham K..k:)how much does it cost?
ham I'm home.
ham Dear, will call Tmorrow.pls accomodate.
ham First answer my question.
spam Sunshine Quiz Wkly Q! Win a top Sony DVD player if u know which country the Algarve is in? Txt ansr to 82277. £1.50 SP:Tyrone
spam Want 2 get laid tonight? Want real Dogging locations sent direct 2 ur mob? Join the UK's largest Dogging Network bt Txting GRAVEL to 69888! Nt. ec2a. 31p.msg@150p
ham I only haf msn. It's [email protected]
ham He is there. You call and meet him
ham No no. I will check all rooms befor activities
spam You'll not rcv any more msgs from the chat svc. For FREE Hardcore services text GO to: 69988 If u get nothing u must Age Verify with yr network & try again
ham Got c... I lazy to type... I forgot ü in lect... I saw a pouch but like not v nice...
ham K, text me when you're on the way
ham Sir, Waiting for your mail.
ham A swt thought: "Nver get tired of doing little things 4 lovable persons.." Coz..somtimes those little things occupy d biggest part in their Hearts.. Gud ni8
ham I know you are. Can you pls open the back?
ham Yes see ya not on the dot
ham Whats the staff name who is taking class for us?
spam FreeMsg Why haven't you replied to my text? I'm Randy, sexy, female and live local. Luv to hear from u. Netcollex Ltd 08700621170150p per msg reply Stop to end
ham Ummma.will call after check in.our life will begin from qatar so pls pray very hard.
ham K..i deleted my contact that why?
ham Sindu got job in birla soft ..
ham The wine is flowing and i'm i have nevering..
ham Yup i thk cine is better cos no need 2 go down 2 plaza mah.
ham Ok... Ur typical reply...
ham As per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune
ham You are everywhere dirt, on the floor, the windows, even on my shirt. And sometimes when i open my mouth, you are all that comes flowing out. I dream of my world without you, then half my chores are out too. A time of joy for me, lots of tv shows i.ll see. But i guess like all things you just must exist, like rain, hail and mist, and when my time here is done, you and i become one.
ham Aaooooright are you at work?
ham I'm leaving my house now...
ham Hello, my love. What are you doing? Did you get to that interview today? Are you you happy? Are you being a good boy? Do you think of me?Are you missing me ?
spam Customer service annoncement. You have a New Years delivery waiting for you. Please call 07046744435 now to arrange delivery
spam You are a winner U have been specially selected 2 receive £1000 cash or a 4* holiday (flights inc) speak to a live operator 2 claim 0871277810810
ham Keep yourself safe for me because I need you and I miss you already and I envy everyone that see's you in real life
ham New car and house for my parents.:)i have only new job in hand:)
ham I'm so in love with you. I'm excited each day i spend with you. You make me so happy.
spam -PLS STOP bootydelious (32/F) is inviting you to be her friend. Reply YES-434 or NO-434 See her: www.SMS.ac/u/bootydelious STOP? Send STOP FRND to 62468
spam BangBabes Ur order is on the way. U SHOULD receive a Service Msg 2 download UR content. If U do not, GoTo wap. bangb. tv on UR mobile internet/service menu
ham I place all ur points on e cultures module already.
spam URGENT! We are trying to contact you. Last weekends draw shows that you have won a £900 prize GUARANTEED. Call 09061701939. Claim code S89. Valid 12hrs only
ham Hi frnd, which is best way to avoid missunderstding wit our beloved one's?
ham Great escape. I fancy the bridge but needs her lager. See you tomo
ham Yes :)it completely in out of form:)clark also utter waste.
ham Sir, I need AXIS BANK account no and bank address.
ham Hmmm.. Thk sure got time to hop ard... Ya, can go 4 free abt... Muz call u to discuss liao...
ham What time you coming down later?
ham Bloody hell, cant believe you forgot my surname Mr . Ill give u a clue, its spanish and begins with m...
ham Well, i'm gonna finish my bath now. Have a good...fine night.
ham Let me know when you've got the money so carlos can make the call
ham U still going to the mall?
ham Turns out my friends are staying for the whole show and won't be back til ~ <#> , so feel free to go ahead and smoke that $ <#> worth
ham Text her. If she doesnt reply let me know so i can have her log in
ham Hi! You just spoke to MANEESHA V. We'd like to know if you were satisfied with the experience. Reply Toll Free with Yes or No.
ham You lifted my hopes with the offer of money. I am in need. Especially when the end of the month approaches and it hurts my studying. Anyways have a gr8 weekend
ham Lol no. U can trust me.
ham ok. I am a gentleman and will treat you with dignity and respect.
ham He will, you guys close?
ham Going on nothing great.bye
ham Hello handsome ! Are you finding that job ? Not being lazy ? Working towards getting back that net for mummy ? Where's my boytoy now ? Does he miss me ?
ham Haha awesome, be there in a minute
spam Please call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!
ham Have you got Xmas radio times. If not i will get it now
ham I jus reached home. I go bathe first. But my sis using net tell u when she finishes k...
spam Are you unique enough? Find out from 30th August. www.areyouunique.co.uk
ham I'm sorry. I've joined the league of people that dont keep in touch. You mean a great deal to me. You have been a friend at all times even at great personal cost. Do have a great week.|
ham Hi :)finally i completed the course:)
ham It will stop on itself. I however suggest she stays with someone that will be able to give ors for every stool.
ham How are you doing? Hope you've settled in for the new school year. Just wishin you a gr8 day
ham Gud mrng dear hav a nice day
ham Did u got that persons story
ham is your hamster dead? Hey so tmr i meet you at 1pm orchard mrt?
ham Hi its Kate how is your evening? I hope i can see you tomorrow for a bit but i have to bloody babyjontet! Txt back if u can. :) xxx
ham Found it, ENC <#> , where you at?
ham I sent you <#> bucks
ham Hello darlin ive finished college now so txt me when u finish if u can love Kate xxx
ham Your account has been refilled successfully by INR <DECIMAL> . Your KeralaCircle prepaid account balance is Rs <DECIMAL> . Your Transaction ID is KR <#> .
ham Goodmorning sleeping ga.
ham U call me alter at 11 ok.
ham Ü say until like dat i dun buy ericsson oso cannot oredi lar...
ham As I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there.
ham Aight yo, dats straight dogg
ham You please give us connection today itself before <DECIMAL> or refund the bill
ham Both :) i shoot big loads so get ready!
ham What's up bruv, hope you had a great break. Do have a rewarding semester.
ham Home so we can always chat
ham K:)k:)good:)study well.
ham Yup... How ü noe leh...
ham Sounds great! Are you home now?
ham Finally the match heading towards draw as your prediction.
ham Tired. I haven't slept well the past few nights.
ham Easy ah?sen got selected means its good..
ham I have to take exam with march 3
ham Yeah you should. I think you can use your gt atm now to register. Not sure but if there's anyway i can help let me know. But when you do be sure you are ready.
ham Ok no prob. Take ur time.
ham There is os called ubandu which will run without installing in hard disk...you can use that os to copy the important files in system and give it to repair shop..
ham Sorry, I'll call later
ham U say leh... Of course nothing happen lar. Not say v romantic jus a bit only lor. I thk e nite scenery not so nice leh.
spam 500 New Mobiles from 2004, MUST GO! Txt: NOKIA to No: 89545 & collect yours today!From ONLY £1 www.4-tc.biz 2optout 087187262701.50gbp/mtmsg18
ham Would really appreciate if you call me. Just need someone to talk to.
spam Will u meet ur dream partner soon? Is ur career off 2 a flyng start? 2 find out free, txt HORO followed by ur star sign, e. g. HORO ARIES
ham Hey company elama po mudyadhu.
ham Life is more strict than teacher... Bcoz Teacher teaches lesson & then conducts exam, But Life first conducts Exam & then teaches Lessons. Happy morning. . .
ham Dear good morning now only i am up
ham Get down in gandhipuram and walk to cross cut road. Right side <#> street road and turn at first right.
ham Dear we are going to our rubber place
ham Sorry battery died, yeah I'm here
ham Yes:)here tv is always available in work place..
spam Text & meet someone sexy today. U can find a date or even flirt its up to U. Join 4 just 10p. REPLY with NAME & AGE eg Sam 25. 18 -msg recd@thirtyeight pence
ham I have printed it oh. So <#> come upstairs
ham Or ill be a little closer like at the bus stop on the same street
ham Where are you?when wil you reach here?
ham New Theory: Argument wins d SITUATION, but loses the PERSON. So dont argue with ur friends just.. . . . kick them & say, I'm always correct.!
spam U 447801259231 have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094597
ham Tomarrow final hearing on my laptop case so i cant.
ham PLEASSSSSSSEEEEEE TEL ME V AVENT DONE SPORTSx
ham Okay. No no, just shining on. That was meant to be signing, but that sounds better.
ham Although i told u dat i'm into baig face watches now but i really like e watch u gave cos it's fr u. Thanx 4 everything dat u've done today, i'm touched...
ham U don't remember that old commercial?
ham Too late. I said i have the website. I didn't i have or dont have the slippers
ham I asked you to call him now ok
ham Kallis wont bat in 2nd innings.
ham It didnt work again oh. Ok goodnight then. I.ll fix and have it ready by the time you wake up. You are very dearly missed have a good night sleep.
spam Congratulations ur awarded 500 of CD vouchers or 125gift guaranteed & Free entry 2 100 wkly draw txt MUSIC to 87066 TnCs www.Ldew.com1win150ppmx3age16
ham Ranjith cal drpd Deeraj and deepak 5min hold
ham Wen ur lovable bcums angry wid u, dnt take it seriously.. Coz being angry is d most childish n true way of showing deep affection, care n luv!.. kettoda manda... Have nice day da.
ham What you doing?how are you?
ham Ups which is 3days also, and the shipping company that takes 2wks. The other way is usps which takes a week but when it gets to lag you may have to bribe nipost to get your stuff.
ham I'm back, lemme know when you're ready
ham Don't necessarily expect it to be done before you get back though because I'm just now headin out
ham Mmm so yummy babe ... Nice jolt to the suzy
ham Where are you lover ? I need you ...
spam We tried to contact you re your reply to our offer of a Video Handset? 750 anytime networks mins? UNLIMITED TEXT? Camcorder? Reply or call 08000930705 NOW
ham I‘m parked next to a MINI!!!! When are you coming in today do you think?
ham Yup
ham Anyway i'm going shopping on my own now. Cos my sis not done yet. Dun disturb u liao.
ham MY NO. IN LUTON 0125698789 RING ME IF UR AROUND! H*
spam Hey I am really horny want to chat or see me naked text hot to 69698 text charged at 150pm to unsubscribe text stop 69698
ham Why you Dint come with us.
ham Same. Wana plan a trip sometme then
ham Not sure yet, still trying to get a hold of him
spam Ur ringtone service has changed! 25 Free credits! Go to club4mobiles.com to choose content now! Stop? txt CLUB STOP to 87070. 150p/wk Club4 PO Box1146 MK45 2WT
ham The evo. I just had to download flash. Jealous?
spam Ringtone Club: Get the UK singles chart on your mobile each week and choose any top quality ringtone! This message is free of charge.
ham Come to mu, we're sorting out our narcotics situation
ham Night has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay.
spam HMV BONUS SPECIAL 500 pounds of genuine HMV vouchers to be won. Just answer 4 easy questions. Play Now! Send HMV to 86688 More info:www.100percent-real.com
ham Usf I guess, might as well take 1 car
ham No objection. My bf not coming.
ham Thanx...
ham Tell rob to mack his gf in the theater
ham Awesome, I'll see you in a bit
ham Just sent it. So what type of food do you like?
ham All done? All handed in? Celebrations in full swing yet?
ham You got called a tool?
ham "Wen u miss someone, the person is definitely special for u..... But if the person is so special, why to miss them, just Keep-in-touch" gdeve..
ham Ok. I asked for money how far
ham Okie...
ham Yeah I think my usual guy's still passed out from last night, if you get ahold of anybody let me know and I'll throw down
ham K, I might come by tonight then if my class lets out early
ham Ok..
ham hi baby im cruisin with my girl friend what r u up 2? give me a call in and hour at home if thats alright or fone me on this fone now love jenny xxx
ham My life Means a lot to me, Not because I love my life, But because I love the people in my life, The world calls them friends, I call them my World:-).. Ge:-)..
ham Dear,shall mail tonite.busy in the street,shall update you tonite.things are looking ok.varunnathu edukkukayee raksha ollu.but a good one in real sense.
ham Hey you told your name to gautham ah?
ham Haf u found him? I feel so stupid da v cam was working.
ham Oops. 4 got that bit.
ham Are you this much buzy
ham I accidentally deleted the message. Resend please.
spam T-Mobile customer you may now claim your FREE CAMERA PHONE upgrade & a pay & go sim card for your loyalty. Call on 0845 021 3680.Offer ends 28thFeb.T&C's apply
ham Unless it's a situation where YOU GO GURL would be more appropriate
ham Hurt me... Tease me... Make me cry... But in the end of my life when i die plz keep one rose on my grave and say STUPID I MISS U.. HAVE A NICE DAY BSLVYL
ham I cant pick the phone right now. Pls send a message
ham Need a coffee run tomo?Can't believe it's that time of week already
ham Awesome, I remember the last time we got somebody high for the first time with diesel :V
ham Shit that is really shocking and scary, cant imagine for a second. Def up for night out. Do u think there is somewhere i could crash for night, save on taxi?
ham Oh and by the way you do have more food in your fridge! Want to go out for a meal tonight?
ham He is a womdarfull actor
spam SMS. ac Blind Date 4U!: Rodds1 is 21/m from Aberdeen, United Kingdom. Check Him out http://img. sms. ac/W/icmb3cktz8r7!-4 no Blind Dates send HIDE
ham Yup... From what i remb... I think should be can book...
ham Jos ask if u wana meet up?
ham Lol yes. Our friendship is hanging on a thread cause u won't buy stuff.
spam TheMob> Check out our newest selection of content, Games, Tones, Gossip, babes and sport, Keep your mobile fit and funky text WAP to 82468
ham Where are the garage keys? They aren't on the bookshelf
ham Today is ACCEPT DAY..U Accept me as? Brother Sister Lover Dear1 Best1 Clos1 Lvblefrnd Jstfrnd Cutefrnd Lifpartnr Belovd Swtheart Bstfrnd No rply means enemy
spam Think ur smart ? Win £200 this week in our weekly quiz, text PLAY to 85222 now!T&Cs WinnersClub PO BOX 84, M26 3UZ. 16+. GBP1.50/week
ham He says he'll give me a call when his friend's got the money but that he's definitely buying before the end of the week
ham Hi the way I was with u 2day, is the normal way&this is the real me. UR unique&I hope I know u 4 the rest of mylife. Hope u find wot was lost.
ham You made my day. Do have a great day too.
ham K.k:)advance happy pongal.
ham Hmmm... Guess we can go 4 kb n power yoga... Haha, dunno we can tahan power yoga anot... Thk got lo oso, forgot liao...
ham Not really dude, have no friends i'm afraid :(
spam December only! Had your mobile 11mths+? You are entitled to update to the latest colour camera mobile for Free! Call The Mobile Update Co FREE on 08002986906
ham Coffee cake, i guess...
ham Merry Christmas to you too babe, i love ya *kisses*
ham Hey... Why dont we just go watch x men and have lunch... Haha
ham cud u tell ppl im gona b a bit l8 cos 2 buses hav gon past cos they were full & im still waitin 4 1. Pete x
ham That would be great. We'll be at the Guild. Could meet on Bristol road or somewhere - will get in touch over weekend. Our plans take flight! Have a good week
ham No problem. How are you doing?
ham No calls..messages..missed calls
ham Hi da:)how is the todays class?
ham I'd say that's a good sign but, well, you know my track record at reading women
ham Cool, text me when you're parked
ham I'm reading the text i just sent you. Its meant to be a joke. So read it in that light
ham K.k:)apo k.good movie.
ham Maybe i could get book out tomo then return it immediately ..? Or something.
spam Call Germany for only 1 pence per minute! Call from a fixed line via access number 0844 861 85 85. No prepayment. Direct access!
ham Any chance you might have had with me evaporated as soon as you violated my privacy by stealing my phone number from your employer's paperwork. Not cool at all. Please do not contact me again or I will report you to your supervisor.
spam Valentines Day Special! Win over £1000 in our quiz and take your partner on the trip of a lifetime! Send GO to 83600 now. 150p/msg rcvd. CustCare:08718720201.
ham Ta-Daaaaa! I am home babe, are you still up ?
ham Cool. So how come you havent been wined and dined before?
ham Just sleeping..and surfing
ham Sorry, I'll call later
ham U calling me right? Call my hand phone...
ham Ok that's great thanx a lot.
ham I take it the post has come then! You must have 1000s of texts now! Happy reading. My one from wiv hello caroline at the end is my favourite. Bless him
ham Where u been hiding stranger?
ham Am not interested to do like that.
ham My sister cleared two round in birla soft yesterday.
ham Gudnite....tc...practice going on
ham Dis is yijue. I jus saw ur mail. In case huiming havent sent u my num. Dis is my num.
ham One small prestige problem now.
spam Fancy a shag? I do.Interested? sextextuk.com txt XXUK SUZY to 69876. Txts cost 1.50 per msg. TnCs on website. X
ham Just checking in on you. Really do miss seeing Jeremiah. Do have a great month
ham Nah can't help you there, I've never had an iphone
ham If you're not in my car in an hour and a half I'm going apeshit
ham TODAY is Sorry day.! If ever i was angry with you, if ever i misbehaved or hurt you? plz plz JUST SLAP URSELF Bcoz, Its ur fault, I'm basically GOOD
ham Yo you guys ever figure out how much we need for alcohol? Jay and I are trying to figure out how much we can safely spend on weed
ham <#> ISH MINUTES WAS 5 MINUTES AGO. WTF.
ham Thank You for calling.Forgot to say Happy Onam to you Sirji.I am fine here and remembered you when i met an insurance person.Meet You in Qatar Insha Allah.Rakhesh, ex Tata AIG who joined TISSCO,Tayseer.
spam Congratulations ur awarded 500 of CD vouchers or 125gift guaranteed & Free entry 2 100 wkly draw txt MUSIC to 87066 TnCs www.Ldew.com1win150ppmx3age16
spam Ur cash-balance is currently 500 pounds - to maximize ur cash-in now send CASH to 86688 only 150p/msg. CC: 08708800282 HG/Suite342/2Lands Row/W1J6HL
ham I'm an actor. When i work, i work in the evening and sleep late. Since i'm unemployed at the moment, i ALWAYS sleep late. When you're unemployed, every day is saturday.
ham Hello! Just got here, st andrews-boy its a long way! Its cold. I will keep you posted
ham Ha ha cool cool chikku chikku:-):-DB-)
ham Oh ok no prob..
ham Check audrey's status right now
ham Busy here. Trying to finish for new year. I am looking forward to finally meeting you...
ham Good afternoon sunshine! How dawns that day ? Are we refreshed and happy to be alive? Do we breathe in the air and smile ? I think of you, my love ... As always
ham Well i know Z will take care of me. So no worries.
spam Update_Now - Xmas Offer! Latest Motorola, SonyEricsson & Nokia & FREE Bluetooth! Double Mins & 1000 Txt on Orange. Call MobileUpd8 on 08000839402 or call2optout/F4Q=
spam Here is your discount code RP176781. To stop further messages reply stop. www.regalportfolio.co.uk. Customer Services 08717205546
ham Wat uniform? In where get?
ham Cool, text me when you're ready
ham Hello my boytoy ... Geeee I miss you already and I just woke up. I wish you were here in bed with me, cuddling me. I love you ...
ham I will spoil you in bed as well :)
ham I'm going for bath will msg you next <#> min..
ham I cant keep talking to people if am not sure i can pay them if they agree to price. So pls tell me what you want to really buy and how much you are willing to pay
spam Thanks for your Ringtone Order, Reference T91. You will be charged GBP 4 per week. You can unsubscribe at anytime by calling customer services on 09057039994
ham Can you say what happen
ham You could have seen me..i did't recognise you Face.:)
ham Well there's not a lot of things happening in Lindsay on New years *sighs* Some bars in Ptbo and the blue heron has something going
ham Keep my payasam there if rinu brings
ham I taught that Ranjith sir called me. So only i sms like that. Becaus hes verifying about project. Prabu told today so only pa dont mistake me..
ham I guess that's why you re worried. You must know that there's a way the body repairs itself. And i'm quite sure you shouldn't worry. We'll take it slow. First the tests, they will guide when your ovulation is then just relax. Nothing you've said is a reason to worry but i.ll keep on followin you up.
ham Yeah sure, give me a couple minutes to track down my wallet
ham Hey leave it. not a big deal:-) take care.
ham Hey i will be late ah... Meet you at 945+
spam Double mins and txts 4 6months FREE Bluetooth on Orange. Available on Sony, Nokia Motorola phones. Call MobileUpd8 on 08000839402 or call2optout/N9DX
ham It took Mr owl 3 licks
ham Customer place i will call you.
ham Mm that time you dont like fun
spam 4mths half price Orange line rental & latest camera phones 4 FREE. Had your phone 11mths ? Call MobilesDirect free on 08000938767 to update now! or2stoptxt
ham Yup having my lunch buffet now.. U eat already?
ham Huh so late... Fr dinner?
ham Hey so this sat are we going for the intro pilates only? Or the kickboxing too?
ham Morning only i can ok.
ham Yes i think so. I am in office but my lap is in room i think thats on for the last few days. I didnt shut that down
ham Pick you up bout 7.30ish? What time are and that going?
ham From here after The performance award is calculated every two month.not for current one month period..
ham Was actually sleeping and still might when u call back. So a text is gr8. You rock sis. Will send u a text wen i wake.
ham You are always putting your business out there. You put pictures of your ass on facebook. You are one of the most open people i've ever met. Why would i think a picture of your room would hurt you, make you feel violated.
ham Good evening Sir, Al Salam Wahleykkum.sharing a happy news.By the grace of God, i got an offer from Tayseer,TISSCO and i joined.Hope you are fine.Inshah Allah,meet you sometime.Rakhesh,visitor from India.
ham Hmmm...k...but i want to change the field quickly da:-)i wanna get system administrator or network administrator..
spam FREE RINGTONE text FIRST to 87131 for a poly or text GET to 87131 for a true tone! Help? 0845 2814032 16 after 1st free, tones are 3x£150pw to e£nd txt stop
ham Dear how is chechi. Did you talk to her
ham The hair cream has not been shipped.
ham None of that's happening til you get here though
ham Yep, the great loxahatchee xmas tree burning of <#> starts in an hour
ham Haha get used to driving to usf man, I know a lot of stoners
ham All was well until slightly disastrous class this pm with my fav darlings! Hope day off ok. Coffee wld be good as can't stay late tomorrow. Same time + place as always?
ham Hello! Good week? Fancy a drink or something later?
ham Headin towards busetop
ham Message:some text missing* Sender:Name Missing* *Number Missing *Sent:Date missing *Missing U a lot thats y everything is missing sent via fullonsms.com
ham Come by our room at some point so we can iron out the plan for this weekend
ham Cos i want it to be your thing
ham Okies... I'll go yan jiu too... We can skip ard oso, go cine den go mrt one, blah blah blah...
ham Bring home some Wendy =D
spam 100 dating service cal;l 09064012103 box334sk38ch
ham Whatsup there. Dont u want to sleep
ham Alright i have a new goal now
spam FREE entry into our £250 weekly competition just text the word WIN to 80086 NOW. 18 T&C www.txttowin.co.uk
ham Alright, I'll head out in a few minutes, text me where to meet you
spam Send a logo 2 ur lover - 2 names joined by a heart. Txt LOVE NAME1 NAME2 MOBNO eg LOVE ADAM EVE 07123456789 to 87077 Yahoo! POBox36504W45WQ TxtNO 4 no ads 150p
ham Yes:)from last week itself i'm taking live call.
spam Someone has contacted our dating service and entered your phone because they fancy you! To find out who it is call from a landline 09111032124 . PoBox12n146tf150p
ham Siva is in hostel aha:-.
spam URGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09058094455 from land line. Claim 3030. Valid 12hrs only
ham Send this to ur friends and receive something about ur voice..... How is my speaking expression? 1.childish 2.naughty 3.Sentiment 4.rowdy 5.ful of attitude 6.romantic 7.shy 8.Attractive 9.funny <#> .irritating <#> .lovable. reply me..
ham Ok. She'll be ok. I guess
ham aathi..where are you dear..
ham Any pain on urination any thing else?
ham 7 at esplanade.. Do ü mind giving me a lift cos i got no car today..
ham I wnt to buy a BMW car urgently..its vry urgent.but hv a shortage of <#> Lacs.there is no source to arng dis amt. <#> lacs..thats my prob
ham At home watching tv lor.
ham Does she usually take fifteen fucking minutes to respond to a yes or no question
spam Congrats! Nokia 3650 video camera phone is your Call 09066382422 Calls cost 150ppm Ave call 3mins vary from mobiles 16+ Close 300603 post BCM4284 Ldn WC1N3XX
ham Booked ticket for pongal?
ham You available now? I'm like right around hillsborough & <#> th
ham The message sent is askin for <#> dollars. Shoul i pay <#> or <#> ?
ham Ask g or iouri, I've told the story like ten times already
ham How long does applebees fucking take
ham Hi hope u get this txt~journey hasnt been gd,now about 50 mins late I think.
ham But i have to. I like to have love and arrange.
ham Yes..he is really great..bhaji told kallis best cricketer after sachin in world:).very tough to get out.
ham You were supposed to wake ME up >:(
ham Oic... I saw him too but i tot he din c me... I found a group liao...
ham Sorry, I'll call later
ham "HEY HEY WERETHE MONKEESPEOPLE SAY WE MONKEYAROUND! HOWDY GORGEOUS, HOWU DOIN? FOUNDURSELF A JOBYET SAUSAGE?LOVE JEN XXX"
ham Sorry, my battery died, I can come by but I'm only getting a gram for now, where's your place?
ham Well done, blimey, exercise, yeah, i kinda remember wot that is, hmm.
ham I wont get concentration dear you know you are my mind and everything :-)
ham LOL ... Have you made plans for new years?
ham 10 min later k...
ham hanks lotsly!
ham Thanks for this hope you had a good day today
ham K:)k:)what are detail you want to transfer?acc no enough?
ham Ok i will tell her to stay out. Yeah its been tough but we are optimistic things will improve this month.
spam Loan for any purpose £500 - £75,000. Homeowners + Tenants welcome. Have you been previously refused? We can still help. Call Free 0800 1956669 or text back 'help'
ham Si si. I think ill go make those oreo truffles.
ham LOOK AT AMY URE A BEAUTIFUL, INTELLIGENT WOMAN AND I LIKE U A LOT. I KNOW U DONT LIKE ME LIKE THAT SO DONT WORRY.
ham I hope you that's the result of being consistently intelligent and kind. Start asking him about practicum links and keep your ears open and all the best. ttyl
ham 1.20 that call cost. Which i guess isnt bad. Miss ya, need ya, want ya, love ya
ham Going thru a very different feeling.wavering decisions and coping up with the same is the same individual.time will heal everything i believe.
ham Where did u go? My phone is gonna die you have to stay in here
ham Great. Never been better. Each day gives even more reasons to thank God
spam UpgrdCentre Orange customer, you may now claim your FREE CAMERA PHONE upgrade for your loyalty. Call now on 0207 153 9153. Offer ends 26th July. T&C's apply. Opt-out available
ham Sorry, I'll call later ok bye
ham Ok i am on the way to railway
ham great princess! I love giving and receiving oral. Doggy style is my fave position. How about you? I enjoy making love <#> times per night :)
ham They don't put that stuff on the roads to keep it from getting slippery over there?
ham When are you going to ride your bike?
ham Yup, no need. I'll jus wait 4 e rain 2 stop.
ham There are many company. Tell me the language.
spam okmail: Dear Dave this is your final notice to collect your 4* Tenerife Holiday or #5000 CASH award! Call 09061743806 from landline. TCs SAE Box326 CW25WX 150ppm
ham How long has it been since you screamed, princess?
ham Nothing. I meant that once the money enters your account here, the bank will remove its flat rate. Someone transfered <#> to my account and <#> dollars got removed. So the banks differ and charges also differ.be sure you trust the 9ja person you are sending account details to cos...
spam Want 2 get laid tonight? Want real Dogging locations sent direct 2 ur Mob? Join the UK's largest Dogging Network by txting MOAN to 69888Nyt. ec2a. 31p.msg@150p
ham Nice line said by a broken heart- Plz don't cum 1 more times infront of me... Other wise once again I ll trust U... Good 9t:)
ham Ok I'm gonna head up to usf in like fifteen minutes
ham Love you aathi..love u lot..
ham Tension ah?what machi?any problem?
ham K, can I pick up another 8th when you're done?
ham When're you guys getting back? G said you were thinking about not staying for mcr
ham Almost there, see u in a sec
ham Yo carlos, a few friends are already asking me about you, you working at all this weekend?
ham Watching tv lor...
ham Thank you baby! I cant wait to taste the real thing...
ham You should change your fb to jaykwon thuglyfe falconerf
ham If we win its really no 1 side for long time.
spam FREE MESSAGE Activate your 500 FREE Text Messages by replying to this message with the word FREE For terms & conditions, visit www.07781482378.com
ham Dear reached railway. What happen to you
ham Depends on quality. If you want the type i sent boye, faded glory, then about 6. If you want ralphs maybe 2
ham I think i've fixed it can you send a test message?
ham Sorry man my account's dry or I would, if you want we could trade back half or I could buy some shit with my credit card
spam Congrats! 1 year special cinema pass for 2 is yours. call 09061209465 now! C Suprman V, Matrix3, StarWars3, etc all 4 FREE! bx420-ip4-5we. 150pm. Dont miss out!
ham Sorry,in meeting I'll call later
ham What class of <#> reunion?
ham Are you free now?can i call now?
ham Got meh... When?
ham Nope... Think i will go for it on monday... Sorry i replied so late
ham Some of them told accenture is not confirm. Is it true.
ham Kate jackson rec center before 7ish, right?
ham Dear i have reache room
ham Fighting with the world is easy, u either win or lose bt fightng with some1 who is close to u is dificult if u lose - u lose if u win - u still lose.
ham When can ü come out?
ham Check with nuerologist.
ham Lolnice. I went from a fish to ..water.?
spam +123 Congratulations - in this week's competition draw u have won the £1450 prize to claim just call 09050002311 b4280703. T&Cs/stop SMS 08718727868. Over 18 only 150ppm
ham No it's waiting in e car dat's bored wat. Cos wait outside got nothing 2 do. At home can do my stuff or watch tv wat.
ham Maybe westshore or hyde park village, the place near my house?
ham You should know now. So how's anthony. Are you bringing money. I've school fees to pay and rent and stuff like that. Thats why i need your help. A friend in need....|
ham What's the significance?
ham Your opinion about me? 1. Over 2. Jada 3. Kusruthi 4. Lovable 5. Silent 6. Spl character 7. Not matured 8. Stylish 9. Simple Pls reply..
ham 8 at the latest, g's still there if you can scrounge up some ammo and want to give the new ak a try
ham Prabha..i'm soryda..realy..frm heart i'm sory
ham Lol ok your forgiven :)
ham No..jst change tat only..
spam You are guaranteed the latest Nokia Phone, a 40GB iPod MP3 player or a £500 prize! Txt word: COLLECT to No: 83355! IBHltd LdnW15H 150p/Mtmsgrcvd18+
ham S:)no competition for him.
spam Boltblue tones for 150p Reply POLY# or MONO# eg POLY3 1. Cha Cha Slide 2. Yeah 3. Slow Jamz 6. Toxic 8. Come With Me or STOP 4 more tones txt MORE
spam Your credits have been topped up for http://www.bubbletext.com Your renewal Pin is tgxxrz
ham That way transport is less problematic than on sat night. By the way, if u want to ask n to join my bday, feel free. But need to know definite nos as booking on fri.
ham Usually the person is unconscious that's in children but in adults they may just behave abnormally. I.ll call you now
ham But that's on ebay it might be less elsewhere.
ham Shall i come to get pickle
ham Were gonna go get some tacos
ham That's very rude, you on campus?
spam URGENT!: Your Mobile No. was awarded a £2,000 Bonus Caller Prize on 02/09/03! This is our 2nd attempt to contact YOU! Call 0871-872-9755 BOX95QU
ham Hi i won't b ard 4 christmas. But do enjoy n merry x'mas.
spam Today's Offer! Claim ur £150 worth of discount vouchers! Text YES to 85023 now! SavaMob, member offers mobile! T Cs 08717898035. £3.00 Sub. 16 . Unsub reply X
ham Yes! How is a pretty lady like you single?
spam You will recieve your tone within the next 24hrs. For Terms and conditions please see Channel U Teletext Pg 750
ham Jay says that you're a double-faggot
spam PRIVATE! Your 2003 Account Statement for 07815296484 shows 800 un-redeemed S.I.M. points. Call 08718738001 Identifier Code 41782 Expires 18/11/04
ham What Today-sunday..sunday is holiday..so no work..
ham Gudnite....tc...practice going on
ham I'll be late...
ham I've not called you in a while. This is hoping it was l8r malaria and that you know that we miss you guys. I miss Bani big, so pls give her my love especially. Have a great day.
ham Good afternoon, my love! How goes that day ? I hope maybe you got some leads on a job. I think of you, boytoy and send you a passionate kiss from across the sea
ham Probably gonna be here for a while, see you later tonight <)
ham Or maybe my fat fingers just press all these buttons and it doesn't know what to do.
ham Ummmmmaah Many many happy returns of d day my dear sweet heart.. HAPPY BIRTHDAY dear
ham I am in tirupur da, once you started from office call me.
spam from www.Applausestore.com MonthlySubscription@50p/msg max6/month T&CsC web age16 2stop txt stop
ham A famous quote : when you develop the ability to listen to 'anything' unconditionally without losing your temper or self confidence, it means you are ......... 'MARRIED'
ham But am going to college pa. What to do. are else ill come there it self. Pa.
ham 4 oclock at mine. Just to bash out a flat plan.
ham This girl does not stay in bed. This girl doesn't need recovery time. Id rather pass out while having fun then be cooped up in bed
ham Then any special there?
ham I know but you need to get hotel now. I just got my invitation but i had to apologise. Cali is to sweet for me to come to some english bloke's weddin
ham Sorry that took so long, omw now
ham Wait <#> min..
ham Ok give me 5 minutes I think I see her. BTW you're my alibi. You were cutting my hair the whole time.
ham Imagine you finally get to sink into that bath after I have put you through your paces, maybe even having you eat me for a while before I left ... But also imagine the feel of that cage on your cock surrounded by the bath water, reminding you always who owns you ... Enjoy, my cuck
ham Hurry up, I've been weed-deficient for like three days
ham Sure, if I get an acknowledgement from you that it's astoundingly tactless and generally faggy to demand a blood oath fo
ham Ok. Every night take a warm bath drink a cup of milk and you'll see a work of magic. You still need to loose weight. Just so that you know
ham I‘ll have a look at the frying pan in case it‘s cheap or a book perhaps. No that‘s silly a frying pan isn‘t likely to be a book
ham O. Well uv causes mutations. Sunscreen is like essential thesedays
ham Having lunch:)you are not in online?why?
ham I know that my friend already told that.
ham Hi Princess! Thank you for the pics. You are very pretty. How are you?
ham Aiyo... U always c our ex one... I dunno abt mei, she haven reply... First time u reply so fast... Y so lucky not workin huh, got bao by ur sugardad ah...gee..
ham Hi msg me:)i'm in office..
ham Thanx 4 e brownie it's v nice...
ham Geeeee ... I love you so much I can barely stand it
spam GENT! We are trying to contact you. Last weekends draw shows that you won a £1000 prize GUARANTEED. Call 09064012160. Claim Code K52. Valid 12hrs only. 150ppm
ham Fuck babe ... I miss you already, you know ? Can't you let me send you some money towards your net ? I need you ... I want you ... I crave you ...
ham Ill call u 2mrw at ninish, with my address that icky American freek wont stop callin me 2 bad Jen k eh?
ham Oooh bed ridden ey? What are YOU thinking of?
ham So anyways, you can just go to your gym or whatever, my love *smiles* I hope your ok and having a good day babe ... I miss you so much already
ham Love it! Daddy will make you scream with pleasure! I am going to slap your ass with my dick!
ham WOT U WANNA DO THEN MISSY?
ham Yar lor wait 4 my mum 2 finish sch then have lunch lor... I whole morning stay at home clean my room now my room quite clean... Hee...
ham Do you know where my lab goggles went
ham Can you open the door?
ham Waiting for your call.
ham Nope i waiting in sch 4 daddy...
spam You have won ?1,000 cash or a ?2,000 prize! To claim, call09050000327
ham I'm tired of arguing with you about this week after week. Do what you want and from now on, i'll do the same.
ham Ü wait 4 me in sch i finish ard 5..
spam our mobile number has won £5000, to claim calls us back or ring the claims hot line on 09050005321.
ham Arngd marriage is while u r walkin unfortuntly a snake bites u. bt love marriage is dancing in frnt of d snake & sayin Bite me, bite me.
ham Huh so early.. Then ü having dinner outside izzit?
ham Ok anyway no need to change with what you said
spam We tried to contact you re your reply to our offer of 750 mins 150 textand a new video phone call 08002988890 now or reply for free delivery tomorrow
ham my ex-wife was not able to have kids. Do you want kids one day?
ham So how's scotland. Hope you are not over showing your JJC tendencies. Take care. Live the dream
ham Tell them u have a headache and just want to use 1 hour of sick time.
ham I dun thk i'll quit yet... Hmmm, can go jazz ? Yogasana oso can... We can go meet em after our lessons den...
ham "Pete can you please ring meive hardly gotany credit"
ham Ya srsly better than yi tho
ham I'm in a meeting, call me later at
spam For ur chance to win a £250 wkly shopping spree TXT: SHOP to 80878. T's&C's www.txt-2-shop.com custcare 08715705022, 1x150p/wk
spam You have been specially selected to receive a 2000 pound award! Call 08712402050 BEFORE the lines close. Cost 10ppm. 16+. T&Cs apply. AG Promo
spam PRIVATE! Your 2003 Account Statement for 07753741225 shows 800 un-redeemed S. I. M. points. Call 08715203677 Identifier Code: 42478 Expires 24/10/04
ham You still at grand prix?
ham I met you as a stranger and choose you as my friend. As long as the world stands, our friendship never ends. Lets be Friends forever!!! Gud nitz...
ham I am great! How are you?
ham Gud mrng dear have a nice day
spam You have an important customer service announcement. Call FREEPHONE 0800 542 0825 now!
ham Will do. Was exhausted on train this morning. Too much wine and pie. You sleep well too
ham I'm going out to buy mum's present ar.
ham Mind blastin.. No more Tsunamis will occur from now on.. Rajnikant stopped swimming in Indian Ocean..:-D
ham If u sending her home first it's ok lor. I'm not ready yet.
ham Speaking of does he have any cash yet?
ham Be happy there. I will come after noon
ham Meet after lunch la...
ham TaKe CaRE n gET WeLL sOOn
spam XCLUSIVE@CLUBSAISAI 2MOROW 28/5 SOIREE SPECIALE ZOUK WITH NICHOLS FROM PARIS.FREE ROSES 2 ALL LADIES !!! info: 07946746291/07880867867
ham what I meant to say is cant wait to see u again getting bored of this bridgwater banter
ham Neva mind it's ok..
ham It's fine, imma get a drink or somethin. Want me to come find you?
spam 22 days to kick off! For Euro2004 U will be kept up to date with the latest news and results daily. To be removed send GET TXT STOP to 83222
ham Its a valentine game. . . Send dis msg to all ur friends. .. If 5 answers r d same then someone really loves u. Ques- which colour suits me the best?rply me
ham I have many dependents
ham THANX4 TODAY CER IT WAS NICE 2 CATCH UP BUT WE AVE 2 FIND MORE TIME MORE OFTEN OH WELL TAKE CARE C U SOON.C
ham I called and said all to him:)then he have to choose this future.
ham "Happy valentines day" I know its early but i have hundreds of handsomes and beauties to wish. So i thought to finish off aunties and uncles 1st...
ham He like not v shock leh. Cos telling shuhui is like telling leona also. Like dat almost all know liao. He got ask me abt ur reaction lor.
ham For my family happiness..
ham I come n pick ü up... Come out immediately aft ur lesson...
ham Let there be snow. Let there be snow. This kind of weather brings ppl together so friendships can grow.
ham Dear we got <#> dollars hi hi
ham Good words.... But words may leave u in dismay many times.
ham MAKE SURE ALEX KNOWS HIS BIRTHDAY IS OVER IN FIFTEEN MINUTES AS FAR AS YOU'RE CONCERNED
ham sorry, no, have got few things to do. may be in pub later.
ham Nah it's straight, if you can just bring bud or drinks or something that's actually a little more useful than straight cash
ham Haha good to hear, I'm officially paid and on the market for an 8th
ham How many licks does it take to get to the center of a tootsie pop?
ham Yup i thk they r e teacher said that will make my face look longer. Darren ask me not 2 cut too short.
spam New TEXTBUDDY Chat 2 horny guys in ur area 4 just 25p Free 2 receive Search postcode or at gaytextbuddy.com. TXT ONE name to 89693
spam Todays Vodafone numbers ending with 4882 are selected to a receive a £350 award. If your number matches call 09064019014 to receive your £350 award.
ham Please dont say like that. Hi hi hi
ham Thank u!
ham Oh that was a forwarded message. I thought you send that to me
ham Got it. Seventeen pounds for seven hundred ml – hope ok.
spam Dear Voucher Holder, 2 claim this weeks offer, at your PC go to http://www.e-tlp.co.uk/expressoffer Ts&Cs apply.2 stop texts txt STOP to 80062.
ham Me n him so funny...
ham Sweetheart, hope you are not having that kind of day! Have one with loads of reasons to smile. Biola
ham When ü login dat time... Dad fetching ü home now?
ham What will we do in the shower, baby?
ham I had askd u a question some hours before. Its answer
ham Well imma definitely need to restock before thanksgiving, I'll let you know when I'm out
ham said kiss, kiss, i can't do the sound effects! He is a gorgeous man isn't he! Kind of person who needs a smile to brighten his day!
ham Probably gonna swing by in a wee bit
ham Ya very nice. . .be ready on thursday
ham Allo! We have braved the buses and taken on the trains and triumphed. I mean we‘re in b‘ham. Have a jolly good rest of week
ham Watching cartoon, listening music & at eve had to go temple & church.. What about u?
ham Do you mind if I ask what happened? You dont have to say if it is uncomfortable.
spam PRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S. I. M. points. Call 08715203694 Identifier Code: 40533 Expires 31/10/04
ham No prob. I will send to your email.
spam You have won ?1,000 cash or a ?2,000 prize! To claim, call09050000327. T&C: RSTM, SW7 3SS. 150ppm
ham Thats cool! Sometimes slow and gentle. Sonetimes rough and hard :)
ham I'm gonna say no. Sorry. I would but as normal am starting to panic about time. Sorry again! Are you seeing on Tuesday?
ham Wait, do you know if wesleys in town? I bet she does hella drugs!
ham Fine i miss you very much.
ham Did u got that persons story
ham Tell them the drug dealer's getting impatient
ham Sun cant come to earth but send luv as rays. cloud cant come to river but send luv as rain. I cant come to meet U, but can send my care as msg to U. Gud evng
ham You will be in the place of that man
ham It doesnt make sense to take it there unless its free. If you need to know more, wikipedia.com
spam 88800 and 89034 are premium phone services call 08718711108
ham Under the sea, there lays a rock. In the rock, there is an envelope. In the envelope, there is a paper. On the paper, there are 3 words... '
ham Then mum's repent how?
ham Sorry me going home first... Daddy come fetch ü later...
ham Leave it de:-). Start Prepare for next:-)..
ham Yes baby! We can study all the positions of the kama sutra ;)
ham En chikku nange bakra msg kalstiya..then had tea/coffee?
ham Carlos'll be here in a minute if you still need to buy
ham This pay is <DECIMAL> lakhs:)
ham Have a good evening! Ttyl
ham Did u receive my msg?
ham Ho ho - big belly laugh! See ya tomo
spam SMS. ac sun0819 posts HELLO:"You seem cool, wanted to say hi. HI!!!" Stop? Send STOP to 62468
spam Get ur 1st RINGTONE FREE NOW! Reply to this msg with TONE. Gr8 TOP 20 tones to your phone every week just £1.50 per wk 2 opt out send STOP 08452810071 16
ham Ditto. And you won't have to worry about me saying ANYTHING to you anymore. Like i said last night, you do whatever you want and i'll do the same. Peace.
ham I've got <#> , any way I could pick up?
ham I dont knw pa, i just drink milk..
ham Maybe?! Say hi to and find out if got his card. Great escape or wetherspoons?
ham Piggy, r u awake? I bet u're still sleeping. I'm going 4 lunch now...
ham Cause I'm not freaky lol
ham Missed your call cause I was yelling at scrappy. Miss u. Can't wait for u to come home. I'm so lonely today.
ham What is this 'hex' place you talk of? Explain!
ham Ü log off 4 wat. It's sdryb8i
ham Is xy going 4 e lunch?
spam Hi I'm sue. I am 20 years old and work as a lapdancer. I love sex. Text me live - I'm i my bedroom now. text SUE to 89555. By TextOperator G2 1DA 150ppmsg 18+
ham I wanted to ask ü to wait 4 me to finish lect. Cos my lect finishes in an hour anyway.
ham Have you finished work yet? :)
ham Every King Was Once A Crying Baby And Every Great Building Was Once A Map.. Not Imprtant Where U r TODAY, BUT Where U Wil Reach TOMORW. Gud ni8
ham Dear,Me at cherthala.in case u r coming cochin pls call bfore u start.i shall also reach accordingly.or tell me which day u r coming.tmorow i am engaged ans its holiday.
ham Thanks love. But am i doing torch or bold.
spam <Forwarded from 448712404000>Please CALL 08712404000 immediately as there is an urgent message waiting for you.
ham Was the farm open?
ham Sorry to trouble u again. Can buy 4d for my dad again? 1405, 1680, 1843. All 2 big 1 small, sat n sun. Thanx.
ham My sister in law, hope you are having a great month. Just saying hey. Abiola
ham Will purchase d stuff today and mail to you. Do you have a po box number?
ham Ah poop. Looks like ill prob have to send in my laptop to get fixed cuz it has a gpu problem
ham Good. Good job. I like entrepreneurs
ham Aight, you close by or still down around alex's place?
ham meet you in corporation st outside gap … you can see how my mind is working!
ham Mum ask ü to buy food home...
ham K..u also dont msg or reply to his msg..
ham How much r ü willing to pay?
ham Sorry, I'll call later
ham What is important is that you prevent dehydration by giving her enough fluids
ham Thats a bit weird, even ?- where is the do supposed to be happening? But good idea, sure they will be in pub!
ham True dear..i sat to pray evening and felt so.so i sms'd you in some time...
ham I don't think I can get away for a trek that long with family in town, sorry
ham So when do you wanna gym harri
ham Quite late lar... Ard 12 anyway i wun b drivin...
spam To review and KEEP the fantastic Nokia N-Gage game deck with Club Nokia, go 2 www.cnupdates.com/newsletter. unsubscribe from alerts reply with the word OUT
spam 4mths half price Orange line rental & latest camera phones 4 FREE. Had your phone 11mths+? Call MobilesDirect free on 08000938767 to update now! or2stoptxt T&Cs
ham Height of Confidence: All the Aeronautics professors wer calld & they wer askd 2 sit in an aeroplane. Aftr they sat they wer told dat the plane ws made by their students. Dey all hurried out of d plane.. Bt only 1 didnt move... He said:"if it is made by my students,this wont even start........ Datz confidence..
ham It just seems like weird timing that the night that all you and g want is for me to come smoke is the same day as when a shitstorm is attributed to me always coming over and making everyone smoke
spam 08714712388 between 10am-7pm Cost 10p
ham Save yourself the stress. If the person has a dorm account, just send your account details and the money will be sent to you.
ham He also knows about lunch menu only da. . I know
ham When i have stuff to sell i.ll tell you
spam +449071512431 URGENT! This is the 2nd attempt to contact U!U have WON £1250 CALL 09071512433 b4 050703 T&CsBCM4235WC1N3XX. callcost 150ppm mobilesvary. max£7. 50
ham Book which lesson? then you msg me... I will call up after work or sth... I'm going to get specs. My membership is PX3748
spam You have WON a guaranteed £1000 cash or a £2000 prize. To claim yr prize call our customer service representative on 08714712394 between 10am-7pm
ham Macha dont feel upset.i can assume your mindset.believe me one evening with me and i have some wonderful plans for both of us.LET LIFE BEGIN AGAIN.call me anytime
ham Oh is it? Send me the address
ham S'fine. Anytime. All the best with it.
ham That is wondar full flim.
ham Ya even those cookies have jelly on them
ham The world is running and i am still.maybe all are feeling the same,so be it.or i have to admit,i am mad.then where is the correction?or let me call this is life.and keep running with the world,may be u r also running.lets run.
ham Got it! It looks scrumptious... daddy wants to eat you all night long!
ham Of cos can lar i'm not so ba dao ok... 1 pm lor... Y u never ask where we go ah... I said u would ask on fri but he said u will ask today...
ham Alright omw, gotta change my order to a half8th
ham Exactly. Anyways how far. Is jide her to study or just visiting
ham Dunno y u ask me.
spam Email AlertFrom: Jeri StewartSize: 2KBSubject: Low-cost prescripiton drvgsTo listen to email call 123
ham No he didn't. Spring is coming early yay!
ham Lol you won't feel bad when I use her money to take you out to a steak dinner =D
ham Even u dont get in trouble while convincing..just tel him once or twice and just tel neglect his msgs dont c and read it..just dont reply
ham Leaving to qatar tonite in search of an opportunity.all went fast.pls add me in ur prayers dear.Rakhesh
ham Then why no one talking to me
ham Thanks for looking out for me. I really appreciate.
spam Hi. Customer Loyalty Offer:The NEW Nokia6650 Mobile from ONLY £10 at TXTAUCTION! Txt word: START to No: 81151 & get yours Now! 4T&Ctxt TC 150p/MTmsg
ham Wish i were with you now!
ham Haha mayb u're rite... U know me well. Da feeling of being liked by someone is gd lor. U faster go find one then all gals in our group attached liao.
ham Yes i will be there. Glad you made it.
ham Do well :)all will for little time. Thing of good times ahead:
ham Just got up. have to be out of the room very soon. …. i hadn't put the clocks back til at 8 i shouted at everyone to get up and then realised it was 7. wahay. another hour in bed.
ham Ok. There may be a free gym about.
ham Men like shorter ladies. Gaze up into his eyes.
ham Dunno he jus say go lido. Same time 930.
ham I promise to take good care of you, princess. I have to run now. Please send pics when you get a chance. Ttyl!
spam U are subscribed to the best Mobile Content Service in the UK for £3 per 10 days until you send STOP to 82324. Helpline 08706091795
ham Is there a reason we've not spoken this year? Anyways have a great week and all the best in your exam
ham By monday next week. Give me the full gist
spam Do you realize that in about 40 years, we'll have thousands of old ladies running around with tattoos?
spam You have an important customer service announcement from PREMIER.
ham Dont gimme that lip caveboy
ham When did you get to the library
ham Realy sorry-i don't recognise this number and am now confused :) who r u please?!
ham So why didnt you holla?
ham Cant think of anyone with * spare room off * top of my head
ham Faith makes things possible,Hope makes things work,Love makes things beautiful,May you have all three this Christmas!Merry Christmas!
ham U should have made an appointment
ham Call me when you/carlos is/are here, my phone's vibrate is acting up and I might not hear texts
spam Romantic Paris. 2 nights, 2 flights from £79 Book now 4 next year. Call 08704439680Ts&Cs apply.
ham We are at grandmas. Oh dear, u still ill? I felt Shit this morning but i think i am just hungover! Another night then. We leave on sat.
spam Urgent Ur £500 guaranteed award is still unclaimed! Call 09066368327 NOW closingdate04/09/02 claimcode M39M51 £1.50pmmorefrommobile2Bremoved-MobyPOBox734LS27YF
ham Nothing but we jus tot u would ask cos u ba gua... But we went mt faber yest... Yest jus went out already mah so today not going out... Jus call lor...
ham Wishing you and your family Merry "X" mas and HAPPY NEW Year in advance..
spam UR awarded a City Break and could WIN a £200 Summer Shopping spree every WK. Txt STORE to 88039 . SkilGme. TsCs087147403231Winawk!Age16 £1.50perWKsub
ham I'm nt goin, got somethin on, unless they meetin 4 dinner lor... Haha, i wonder who will go tis time...
ham Sorry, I'll call later
ham I cant pick the phone right now. Pls send a message
ham Lol I know! They're so dramatic. Schools already closed for tomorrow. Apparently we can't drive in the inch of snow were supposed to get.
ham Not getting anywhere with this damn job hunting over here!
ham Lol! U drunkard! Just doing my hair at d moment. Yeah still up 4 tonight. Wats the plan?
ham idc get over here, you are not weaseling your way out of this shit twice in a row
ham I wil be there with in <#> minutes. Got any space
ham Just sleeping..and surfing
ham Thanks for picking up the trash.
ham Why don't you go tell your friend you're not sure you want to live with him because he smokes too much then spend hours begging him to come smoke
ham "Hi its Kate it was lovely to see you tonight and ill phone you tomorrow. I got to sing and a guy gave me his card! xxx"
ham Happy New year my dear brother. I really do miss you. Just got your number and decided to send you this text wishing you only happiness. Abiola
ham That means get the door
ham Your opinion about me? 1. Over 2. Jada 3. Kusruthi 4. Lovable 5. Silent 6. Spl character 7. Not matured 8. Stylish 9. Simple Pls reply..
ham Hmmm ... I thought we said 2 hours slave, not 3 ... You are late ... How should I punish you ?
ham Beerage?
spam You have an important customer service announcement from PREMIER. Call FREEPHONE 0800 542 0578 now!
ham Dont think so. It turns off like randomlly within 5min of opening
ham She was supposed to be but couldn't make it, she's still in town though
ham It does it on its own. Most of the time it fixes my spelling. But sometimes it gets a completely diff word. Go figure
spam Ever thought about living a good life with a perfect partner? Just txt back NAME and AGE to join the mobile community. (100p/SMS)
spam 5 Free Top Polyphonic Tones call 087018728737, National Rate. Get a toppoly tune sent every week, just text SUBPOLY to 81618, £3 per pole. UnSub 08718727870.
ham Gud mrng dear hav a nice day
ham This is hoping you enjoyed your game yesterday. Sorry i've not been in touch but pls know that you are fondly bein thot off. Have a great week. Abiola
ham All e best 4 ur driving tmr :-)
ham Y?WHERE U AT DOGBREATH? ITS JUST SOUNDING LIKE JAN C THATS AL!!!!!!!!!
ham Omg I want to scream. I weighed myself and I lost more weight! Woohoo!
ham There generally isn't one. It's an uncountable noun - u in the dictionary. pieces of research?
ham it's really getting me down just hanging around.
spam Orange customer, you may now claim your FREE CAMERA PHONE upgrade for your loyalty. Call now on 0207 153 9996. Offer ends 14thMarch. T&C's apply. Opt-out availa
ham "Petey boy whereare you me and all your friendsare in theKingshead come down if you canlove Nic"
ham Ok i msg u b4 i leave my house.
ham "Gimme a few" was <#> minutes ago
spam Last Chance! Claim ur £150 worth of discount vouchers today! Text SHOP to 85023 now! SavaMob, offers mobile! T Cs SavaMob POBOX84, M263UZ. £3.00 Sub. 16
ham Appt is at <TIME> am. Not my fault u don't listen. I told u twice
spam FREE for 1st week! No1 Nokia tone 4 ur mobile every week just txt NOKIA to 8077 Get txting and tell ur mates. www.getzed.co.uk POBox 36504 W45WQ 16+ norm150p/tone
spam You have won a guaranteed £200 award or even £1000 cashto claim UR award call free on 08000407165 (18+) 2 stop getstop on 88222 PHP. RG21 4JX
ham K I'll be there before 4.
ham I dled 3d its very imp
ham sure, but make sure he knows we ain't smokin yet
ham Boooo you always work. Just quit.
ham I am taking half day leave bec i am not well
ham Ugh I don't wanna get out of bed. It's so warm.
ham S:)s.nervous <#> :)
ham So there's a ring that comes with the guys costumes. It's there so they can gift their future yowifes. Hint hint
spam Congratulations ur awarded either £500 of CD gift vouchers & Free entry 2 our £100 weekly draw txt MUSIC to 87066 TnCs www.Ldew.com1win150ppmx3age16
ham I borrow ur bag ok.
spam U were outbid by simonwatson5120 on the Shinco DVD Plyr. 2 bid again, visit sms. ac/smsrewards 2 end bid notifications, reply END OUT
ham Where's my boytoy? I miss you ... What happened?
ham He has lots of used ones babe, but the model doesn't help. Youi have to bring it over and he'll match it up
ham Also are you bringing galileo or dobby
ham Then why you not responding
ham "BOO BABE! U ENJOYIN YOURJOB? U SEEMED 2 B GETTIN ON WELL HUNNY!HOPE URE OK?TAKE CARE & ILLSPEAK 2U SOONLOTS OF LOVEME XXXX."
ham Good afternoon starshine! How's my boytoy? Does he crave me yet? Ache to fuck me ? *sips cappuccino* I miss you babe *teasing kiss*
ham On the road so cant txt
spam SMSSERVICES. for yourinclusive text credits, pls goto www.comuk.net login= 3qxj9 unsubscribe with STOP, no extra charge. help 08702840625.COMUK. 220-CM2 9AE
spam 25p 4 alfie Moon's Children in need song on ur mob. Tell ur m8s. Txt Tone charity to 8007 for Nokias or Poly charity for polys: zed 08701417012 profit 2 charity.
ham Have a good evening! Ttyl
ham Hmm .. Bits and pieces lol ... *sighs* ...
ham Hahaha..use your brain dear
ham Hey. You got any mail?
ham Sorry light turned green, I meant another friend wanted <#> worth but he may not be around
ham Thanks for yesterday sir. You have been wonderful. Hope you enjoyed the burial. MojiBiola
spam U have a secret admirer. REVEAL who thinks U R So special. Call 09065174042. To opt out Reply REVEAL STOP. 1.50 per msg recd. Cust care 07821230901
ham Hi mate its RV did u hav a nice hol just a message 3 say hello coz havent sent u 1 in ages started driving so stay off roads!RVx
spam Dear Voucher Holder, To claim this weeks offer, at you PC please go to http://www.e-tlp.co.uk/expressoffer Ts&Cs apply. To stop texts, txt STOP to 80062
ham Thank you so much. When we skyped wit kz and sura, we didnt get the pleasure of your company. Hope you are good. We've given you ultimatum oh! We are countin down to aburo. Enjoy! This is the message i sent days ago
ham Surely result will offer:)
ham Good Morning my Dear........... Have a great & successful day.
spam Do you want 750 anytime any network mins 150 text and a NEW VIDEO phone for only five pounds per week call 08002888812 or reply for delivery tomorrow
ham Sir, I have been late in paying rent for the past few months and had to pay a $ <#> charge. I felt it would be inconsiderate of me to nag about something you give at great cost to yourself and that's why i didnt speak up. I however am in a recession and wont be able to pay the charge this month hence my askin well ahead of month's end. Can you please help. Thanks
spam We tried to contact you re our offer of New Video Phone 750 anytime any network mins HALF PRICE Rental camcorder call 08000930705 or reply for delivery Wed
spam Last chance 2 claim ur £150 worth of discount vouchers-Text YES to 85023 now!SavaMob-member offers mobile T Cs 08717898035. £3.00 Sub. 16 . Remove txt X or STOP
ham I luv u soo much u dont understand how special u r 2 me ring u 2morrow luv u xxx
ham Pls send me a comprehensive mail about who i'm paying, when and how much.
ham Our Prashanthettan's mother passed away last night. pray for her and family.
spam Urgent! call 09066350750 from your landline. Your complimentary 4* Ibiza Holiday or 10,000 cash await collection SAE T&Cs PO BOX 434 SK3 8WP 150 ppm 18+
ham K.k:)when are you going?
ham Meanwhile in the shit suite: xavier decided to give us <#> seconds of warning that samantha was coming over and is playing jay's guitar to impress her or some shit. Also I don't think doug realizes I don't live here anymore
ham My stomach has been thru so much trauma I swear I just can't eat. I better lose weight.
ham I am in office:)whats the matter..msg me now.i will call you at break:).
ham Yeah there's barely enough room for the two of us, x has too many fucking shoes. Sorry man, see you later
spam Today's Offer! Claim ur £150 worth of discount vouchers! Text YES to 85023 now! SavaMob, member offers mobile! T Cs 08717898035. £3.00 Sub. 16 . Unsub reply X
ham U reach orchard already? U wan 2 go buy tickets first?
ham I am real, baby! I want to bring out your inner tigress...
ham No da if you run that it activate the full version da.
ham "AH POOR BABY!HOPE URFEELING BETTERSN LUV! PROBTHAT OVERDOSE OF WORK HEY GO CAREFUL SPK 2 U SN LOTS OF LOVEJEN XXX."
ham Stop the story. I've told him i've returned it and he's saying i should not re order it.
spam Talk sexy!! Make new friends or fall in love in the worlds most discreet text dating service. Just text VIP to 83110 and see who you could meet.
ham Going to take your babe out ?
ham Hai ana tomarrow am coming on morning. <DECIMAL> ill be there in sathy then we ll go to RTO office. Reply me after came to home.
ham Spoons it is then okay?
ham Did he just say somebody is named tampa
ham In work now. Going have in few min.
ham Your brother is a genius
ham Sorry, I guess whenever I can get a hold of my connections, maybe an hour or two? I'll text you
ham Did u find out what time the bus is at coz i need to sort some stuff out.
ham Dude ive been seeing a lotta corvettes lately
spam Congratulations ur awarded either a yrs supply of CDs from Virgin Records or a Mystery Gift GUARANTEED Call 09061104283 Ts&Cs www.smsco.net £1.50pm approx 3mins
ham Same here, but I consider walls and bunkers and shit important just because I never play on peaceful but I guess your place is high enough that it don't matter
spam PRIVATE! Your 2003 Account Statement for 07808 XXXXXX shows 800 un-redeemed S. I. M. points. Call 08719899217 Identifier Code: 41685 Expires 07/11/04
spam Hello. We need some posh birds and chaps to user trial prods for champneys. Can i put you down? I need your address and dob asap. Ta r
spam What do U want for Xmas? How about 100 free text messages & a new video phone with half price line rental? Call free now on 0800 0721072 to find out more!
ham Well am officially in a philosophical hole, so if u wanna call am at home ready to be saved!
ham Its going good...no problem..but still need little experience to understand american customer voice...
ham I'll text you when I drop x off
ham Ugh its been a long day. I'm exhausted. Just want to cuddle up and take a nap
ham Talk With Yourself Atleast Once In A Day...!!! Otherwise You Will Miss Your Best FRIEND In This WORLD...!!! -Shakespeare- SHESIL <#>
spam Shop till u Drop, IS IT YOU, either 10K, 5K, £500 Cash or £100 Travel voucher, Call now, 09064011000. NTT PO Box CR01327BT fixedline Cost 150ppm mobile vary
ham Are you in castor? You need to see something
spam Sunshine Quiz Wkly Q! Win a top Sony DVD player if u know which country Liverpool played in mid week? Txt ansr to 82277. £1.50 SP:Tyrone
spam U have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094565
spam U have a Secret Admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09065171142-stopsms-08
spam Reminder: You have not downloaded the content you have already paid for. Goto http://doit. mymoby. tv/ to collect your content.
ham see, i knew giving you a break a few times woul lead to you always wanting to miss curfew. I was gonna gibe you 'til one, but a MIDNIGHT movie is not gonna get out til after 2. You need to come home. You need to getsleep and, if anything, you need to b studdying ear training.
ham I love to give massages. I use lots of baby oil... What is your fave position?
ham Dude we should go sup again
ham Yoyyooo u know how to change permissions for a drive in mac. My usb flash drive
ham Gibbs unsold.mike hussey
ham I like to talk pa but am not able to. I dont know y.
ham Y dun cut too short leh. U dun like ah? She failed. She's quite sad.
ham You unbelievable faglord
ham Wife.how she knew the time of murder exactly
ham Why do you ask princess?
ham I am great princess! What are you thinking about me? :)
ham Nutter. Cutter. Ctter. Cttergg. Cttargg. Ctargg. Ctagg. ie you
ham It's ok i noe u're busy but i'm really too bored so i msg u. I oso dunno wat colour she choose 4 me one.
ham Doesn't g have class early tomorrow and thus shouldn't be trying to smoke at <#>
ham Superb Thought- "Be grateful that u dont have everything u want. That means u still have an opportunity to be happier tomorrow than u are today.":-)
ham Hope you are having a good week. Just checking in
ham I'm used to it. I just hope my agents don't drop me since i've only booked a few things this year. This whole me in boston, them in nyc was an experiment.
ham Thursday night? Yeah, sure thing, we'll work it out then
spam Your free ringtone is waiting to be collected. Simply text the password "MIX" to 85069 to verify. Get Usher and Britney. FML, PO Box 5249, MK17 92H. 450Ppw 16
ham Probably money worries. Things are coming due and i have several outstanding invoices for work i did two and three months ago.
ham How is it possible to teach you. And where.
ham I wonder if your phone battery went dead ? I had to tell you, I love you babe
ham Lovely smell on this bus and it ain't tobacco...
ham We're all getting worried over here, derek and taylor have already assumed the worst
ham Hey what's up charles sorry about the late reply.
spam all the lastest from Stereophonics, Marley, Dizzee Racal, Libertines and The Strokes! Win Nookii games with Flirt!! Click TheMob WAP Bookmark or text WAP to 82468
ham I.ll give her once i have it. Plus she said grinule greet you whenever we speak
ham WHITE FUDGE OREOS ARE IN STORES
spam January Male Sale! Hot Gay chat now cheaper, call 08709222922. National rate from 1.5p/min cheap to 7.8p/min peak! To stop texts call 08712460324 (10p/min)
ham My love ! How come it took you so long to leave for Zaher's? I got your words on ym and was happy to see them but was sad you had left. I miss you
ham I am sorry it hurt you.
ham Can't. I feel nauseous. I'm so pissed. I didn't eat any sweets all week cause today I was planning to pig out. I was dieting all week. And now I'm not hungry :/
ham Ok lor but not too early. Me still having project meeting now.
ham Call me da, i am waiting for your call.
ham I could ask carlos if we could get more if anybody else can chip in
ham Was actually about to send you a reminder today. Have a wonderful weekend
ham When people see my msgs, They think Iam addicted to msging... They are wrong, Bcoz They don\'t know that Iam addicted to my sweet Friends..!! BSLVYL
ham Hey you gave them your photo when you registered for driving ah? Tmr wanna meet at yck?
ham Dont talk to him ever ok its my word.
ham When u wana see it then
ham On ma way to school. Can you pls send me ashley's number
ham It shall be fine. I have avalarr now. Will hollalater
ham She went to attend another two rounds today..but still did't reach home..
ham Actually i deleted my old website..now i m blogging at magicalsongs.blogspot.com
ham K, wait chikku..il send aftr <#> mins
ham But I'm on a diet. And I ate 1 too many slices of pizza yesterday. Ugh I'm ALWAYS on a diet.
ham K:)i will give my kvb acc details:)
ham Oh all have to come ah?
spam money!!! you r a lucky winner ! 2 claim your prize text money 2 88600 over £1million to give away ! ppt150x3+normal text rate box403 w1t1jy
ham I'm really sorry i won't b able 2 do this friday.hope u can find an alternative.hope yr term's going ok:-)
ham Congratulations ore mo owo re wa. Enjoy it and i wish you many happy moments to and fro wherever you go
ham So do you have samus shoulders yet
ham What time you think you'll have it? Need to know when I should be near campus
spam Dear Matthew please call 09063440451 from a landline, your complimentary 4*Lux Tenerife holiday or £1000 CASH await collection. ppm150 SAE T&Cs Box334 SK38XH.
ham Then dun wear jeans lor...
ham Since when, which side, any fever, any vomitin.
ham K:)k.are you in college?
spam Urgent! call 09061749602 from Landline. Your complimentary 4* Tenerife Holiday or £10,000 cash await collection SAE T&Cs BOX 528 HP20 1YF 150ppm 18+
ham Better. Made up for Friday and stuffed myself like a pig yesterday. Now I feel bleh. But at least its not writhing pain kind of bleh.
ham No we sell it all so we'll have tons if coins. Then sell our coins to someone thru paypal. Voila! Money back in life pockets:)
ham Theyre doing it to lots of places. Only hospitals and medical places are safe.
spam How about getting in touch with folks waiting for company? Just txt back your NAME and AGE to opt in! Enjoy the community (150p/SMS)
ham And also I've sorta blown him off a couple times recently so id rather not text him out of the blue looking for weed
ham I sent my scores to sophas and i had to do secondary application for a few schools. I think if you are thinking of applying, do a research on cost also. Contact joke ogunrinde, her school is one me the less expensive ones
ham I cant wait to see you! How were the photos were useful? :)
spam Ur cash-balance is currently 500 pounds - to maximize ur cash-in now send GO to 86688 only 150p/msg. CC: 08718720201 PO BOX 114/14 TCR/W1
ham Hey i booked the kb on sat already... what other lessons are we going for ah? Keep your sat night free we need to meet and confirm our lodging
ham Chk in ur belovd ms dict
ham Is that what time you want me to come?
ham Awesome, lemme know whenever you're around
ham Shb b ok lor... Thanx...
ham Beautiful Truth against Gravity.. Read carefully: "Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it.." GOOD NIGHT
ham Also remember to get dobby's bowl from your car
spam Filthy stories and GIRLS waiting for your
ham Sorry i now then c ur msg... Yar lor so poor thing... But only 4 one night... Tmr u'll have a brand new room 2 sleep in...
ham Love isn't a decision, it's a feeling. If we could decide who to love, then, life would be much simpler, but then less magical
ham Welp apparently he retired
ham My sort code is and acc no is . The bank is natwest. Can you reply to confirm i've sent this to the right person!
ham Where @
ham U sure u can't take any sick time?
spam URGENT! We are trying to contact U. Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050001808 from land line. Claim M95. Valid12hrs only
ham Watching cartoon, listening music & at eve had to go temple & church.. What about u?
ham Yo chad which gymnastics class do you wanna take? The site says Christians class is full..
ham Are you this much buzy
ham Or better still can you catch her and let ask her if she can sell <#> for me.
ham I am not sure about night menu. . . I know only about noon menu
ham What do u want when i come back?.a beautiful necklace as a token of my heart for you.thats what i will give but ONLY to MY WIFE OF MY LIKING.BE THAT AND SEE..NO ONE can give you that.dont call me.i will wait till i come.
ham Are you willing to go for aptitude class.
ham It wont b until 2.15 as trying 2 sort house out, is that ok?
ham Yar lor he wan 2 go c horse racing today mah, so eat earlier lor. I ate chicken rice. U?
ham Haha awesome, omw back now then
ham Yup i thk so until e shop closes lor.
ham what is your account number?
ham Eh u send wrongly lar...
ham Hey no I ad a crap nite was borin without ya 2 boggy with me u boring biatch! Thanx but u wait til nxt time il ave ya
ham Ok i shall talk to him
ham Dont hesitate. You know this is the second time she has had weakness like that. So keep i notebook of what she eat and did the day before or if anything changed the day before so that we can be sure its nothing
ham Hey you can pay. With salary de. Only <#> .
ham Another month. I need chocolate weed and alcohol.
ham If he started searching he will get job in few days.he have great potential and talent.
ham Reckon need to be in town by eightish to walk from * carpark.
spam Congrats! 2 mobile 3G Videophones R yours. call 09063458130 now! videochat wid your mates, play java games, Dload polyPH music, noline rentl.
ham LOOK AT THE FUCKIN TIME. WHAT THE FUCK YOU THINK IS UP
ham Yo guess what I just dropped
ham Carlos says he'll be at mu in <#> minutes
ham I'm in office now . I will call you <#> min:)
ham Geeee ... I miss you already, you know ? Your all I can think about. Fuck, I can't wait till next year when we will be together ... *loving kiss*
ham Yun ah.the ubi one say if ü wan call by tomorrow.call 67441233 look for irene.ere only got bus8,22,65,61,66,382. Ubi cres,ubi tech park.6ph for 1st 5wkg days.èn
ham Ugh. Gotta drive back to sd from la. My butt is sore.
ham 26th OF JULY
ham Hi im having the most relaxing time ever! we have to get up at 7am every day! was the party good the other night? I get home tomorrow at 5ish.
ham Up to ü... Ü wan come then come lor... But i din c any stripes skirt...
ham The Xmas story is peace.. The Xmas msg is love.. The Xmas miracle is jesus.. Hav a blessed month ahead & wish U Merry Xmas...
ham I can't, I don't have her number!
ham Change again... It's e one next to escalator...
ham Yetunde i'm in class can you not run water on it to make it ok. Pls now.
ham Not a lot has happened here. Feels very quiet. Beth is at her aunts and charlie is working lots. Just me and helen in at the mo. How have you been?
ham Then ü wait 4 me at bus stop aft ur lect lar. If i dun c ü then i go get my car then come back n pick ü.
ham Aight will do, thanks again for comin out
ham No..but heard abt tat..
spam Please call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!
ham Yes..he is really great..bhaji told kallis best cricketer after sachin in world:).very tough to get out.
ham <#> am I think? Should say on syllabus
ham Umma. Did she say anything
ham Give me a sec to think think about it
spam Panasonic & BluetoothHdset FREE. Nokia FREE. Motorola FREE & DoubleMins & DoubleTxt on Orange contract. Call MobileUpd8 on 08000839402 or call 2optout
ham I don't quite know what to do. I still can't get hold of anyone. I cud pick you up bout 7.30pm and we can see if they're in the pub?
ham Poyyarikatur,kolathupalayam,unjalur post,erode dis, <#> .
ham Dear Hero,i am leaving to qatar tonite for an apt opportunity.pls do keep in touch at <EMAIL> ,kerala
ham Lol I would but my mom would have a fit and tell the whole family how crazy and terrible I am
ham I just got home babe, are you still awake ?
ham I dunno they close oredi not... Ü v ma fan...
ham Just buy a pizza. Meat lovers or supreme. U get to pick.
ham Ya, told..she was asking wats matter?
ham Dear,regret i cudnt pick call.drove down frm ctla now at cochin home.left mobile in car..ente style ishtamayoo?happy bakrid!
spam FREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 8007 Get txting and tell ur mates www.getzed.co.uk POBox 36504 W45WQ norm150p/tone 16+
ham Shall i send that exe to your mail id.
ham Nope watching tv at home... Not going out. V bored...
ham Don know..wait i will check it.
ham Good afternoon on this glorious anniversary day, my sweet J !! I hope this finds you happy and content, my Prey. I think of you and send a teasing kiss from across the sea coaxing images of fond souveniers ... You Cougar-Pen
spam Guess what! Somebody you know secretly fancies you! Wanna find out who it is? Give us a call on 09065394514 From Landline DATEBox1282EssexCM61XN 150p/min 18
ham We still on for tonight?
ham May i call You later Pls
ham Hasn't that been the pattern recently crap weekends?
ham I have a sore throat. It's scratches when I talk
ham Yes da. Any plm at ur office
ham Are you not around or just still asleep? :V
ham Lol you forgot it eh ? Yes, I'll bring it in babe
ham Its good, we'll find a way
ham Can not use foreign stamps in this country. Good lecture .
ham Yup bathe liao...
ham HAPPY NEW YEAR MY NO.1 MAN
ham OH MR SHEFFIELD! You wanna play THAT game, okay. You're the boss and I'm the nanny. You give me a raise and I'll give YOU one!!
ham ZOE IT JUST HIT ME 2 IM FUCKING SHITIN MYSELF IL DEFO TRY MY HARDEST 2 CUM 2MOROW LUV U MILLIONS LEKDOG
ham Hello baby, did you get back to your mom's ? Are you setting up the computer now ? Filling your belly ? How goes it loverboy ? I miss you already ... *sighs*
ham No my blankets are sufficient, thx
ham naughty little thought: 'its better to flirt, flirt n flirt, rather than loving someone n gettin hurt, hurt n hurt...:-) Gud nyt
ham Edison has rightly said, "A fool can ask more questions than a wise man can answer" Now you know why all of us are speechless during ViVa.. GM,GN,GE,GNT:-)
ham They just talking thats it de. They wont any other.
ham Today am going to college so am not able to atten the class.
ham I'm in class. Will holla later
ham Easy ah?sen got selected means its good..
ham Mmm thats better now i got a roast down me! id b better if i had a few drinks down me 2! Good indian?
spam We know someone who you know that fancies you. Call 09058097218 to find out who. POBox 6, LS15HB 150p
ham Come round, it's .
ham Do 1 thing! Change that sentence into: "Because i want 2 concentrate in my educational career im leaving here.."
spam 1000's flirting NOW! Txt GIRL or BLOKE & ur NAME & AGE, eg GIRL ZOE 18 to 8007 to join and get chatting!
ham I walked an hour 2 c u! doesnt that show I care y wont u believe im serious?
spam 18 days to Euro2004 kickoff! U will be kept informed of all the latest news and results daily. Unsubscribe send GET EURO STOP to 83222.
ham Are you available for soiree on June 3rd?
ham Do u noe wat time e place dat sells 4d closes?
ham I got another job! The one at the hospital doing data analysis or something, starts on monday! Not sure when my thesis will got finished
ham Jay's getting really impatient and belligerent
ham HIYA COMIN 2 BRISTOL 1 ST WEEK IN APRIL. LES GOT OFF + RUDI ON NEW YRS EVE BUT I WAS SNORING.THEY WERE DRUNK! U BAK AT COLLEGE YET? MY WORK SENDS INK 2 BATH.
ham I'm at work. Please call
ham Then u drive lor.
ham Ard 515 like dat. Y?
ham Tell me they're female :V how're you throwing in? We're deciding what all to get now
spam EASTENDERS TV Quiz. What FLOWER does DOT compare herself to? D= VIOLET E= TULIP F= LILY txt D E or F to 84025 NOW 4 chance 2 WIN £100 Cash WKENT/150P16+
ham I'm working technical support :)voice process.networking field.
ham I might come to kerala for 2 days.so you can be prepared to take a leave once i finalise .dont plan any travel during my visit.need to finish urgent works.
ham Ok. Not sure what time tho as not sure if can get to library before class. Will try. See you at some point! Have good eve.
spam We have new local dates in your area - Lots of new people registered in YOUR AREA. Reply DATE to start now! 18 only www.flirtparty.us REPLYS150
ham That's fine, I'll bitch at you about it later then
ham No my mum went 2 dentist.
ham Once free call me sir. I am waiting for you.
ham Meeting u is my work. . . Tel me when shall i do my work tomorrow
spam Someone U know has asked our dating service 2 contact you! Cant Guess who? CALL 09058091854 NOW all will be revealed. PO BOX385 M6 6WU
ham Jus finish bathing...
ham alright, I'll make sure the car is back tonight
spam URGENT! We are trying to contact U. Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050003091 from land line. Claim C52. Valid12hrs only
spam Dear U've been invited to XCHAT. This is our final attempt to contact u! Txt CHAT to 86688
ham Lul im gettin some juicy gossip at the hospital. Two nurses are talking about how fat they are gettin. And one thinks shes obese. Oyea.
ham Aight ill get on fb in a couple minutes
ham Oi. Ami parchi na re. Kicchu kaaj korte iccha korche na. Phone ta tul na. Plz. Plz.
ham Where can download clear movies. Dvd copies.
ham Yep, by the pretty sculpture
ham Convey my regards to him
ham Me too watching surya movie only. . .after 6 pm vijay movie POKKIRI
ham You tell what happen dont behave like this to me. Ok no need to say
ham Can u get pic msgs to your phone?
ham Send to someone else :-)
ham Wat makes some people dearer is not just de happiness dat u feel when u meet them but de pain u feel when u miss dem!!!
ham For me the love should start with attraction.i should feel that I need her every time around me.she should be the first thing which comes in my thoughts.I would start the day and end it with her.she should be there every time I dream.love will be then when my every breath has her name.my life should happen around her.my life will be named to her.I would cry for her.will give all my happiness and take all her sorrows.I will be ready to fight with anyone for her.I will be in love when I will be doing the craziest things for her.love will be when I don't have to proove anyone that my girl is the most beautiful lady on the whole planet.I will always be singing praises for her.love will be when I start up making chicken curry and end up makiing sambar.life will be the most beautiful then.will get every morning and thank god for the day because she is with me.I would like to say a lot..will tell later..
ham FR'NDSHIP is like a needle of a clock. Though V r in d same clock, V r nt able 2 met. Evn if V meet,itz only 4few seconds. Bt V alwys stay conected. Gud 9t;-)
ham I don't think he has spatula hands!
ham You can never do NOTHING
spam You are awarded a SiPix Digital Camera! call 09061221061 from landline. Delivery within 28days. T Cs Box177. M221BP. 2yr warranty. 150ppm. 16 . p p£3.99
ham Goodmorning today i am late for <DECIMAL> min.
spam WIN URGENT! Your mobile number has been awarded with a £2000 prize GUARANTEED call 09061790121 from land line. claim 3030 valid 12hrs only 150ppm
ham Please da call me any mistake from my side sorry da. Pls da goto doctor.
ham Where r we meeting?
ham Well the weather in cali's great. But its complexities are great. You need a car to move freely, its taxes are outrageous. But all in all its a great place. The sad part is i missing home.
ham Now only i reached home. . . I am very tired now. . I will come tomorro
ham Ryder unsold.now gibbs.
spam Dear Subscriber ur draw 4 £100 gift voucher will b entered on receipt of a correct ans. When was Elvis Presleys Birthday? TXT answer to 80062
ham Don't fret. I'll buy the ovulation test strips and send them to you. You wont get them til like march. Can you send me your postal address.u'll be alright.Okay.
ham NO GIFTS!! You trying to get me to throw myself off a cliff or something?
ham Been up to ne thing interesting. Did you have a good birthday? When are u wrking nxt? I started uni today.
ham You busy or can I come by at some point and figure out what we're doing tomorrow
ham Yeah go on then, bored and depressed sittin waitin for phone to ring... Hope the wind drops though, scary
ham Black shirt n blue jeans... I thk i c ü...
ham Aiyah sorry lor... I watch tv watch until i forgot 2 check my phone.
spam Message Important information for O2 user. Today is your lucky day! 2 find out why log onto http://www.urawinner.com there is a fantastic surprise awaiting you
ham on hen night. Going with a swing
ham Good afternoon, my love. How goes your day ? What are you up to ? I woke early and am online waiting for you ... Hmmm ... Italian boy is online I see . *grins*
ham From someone not to smoke when every time I've smoked in the last two weeks is because of you calling or texting me that you wanted to smoke
ham No you'll just get a headache trying to figure it out. U can trust me to do the math. I promise. O:-)
ham S s..first time..dhoni rocks...
ham Ok ill tell the company
ham Awesome, think we can get an 8th at usf some time tonight?
ham So that means you still think of teju
ham No I'm good for the movie, is it ok if I leave in an hourish?
ham No no:)this is kallis home ground.amla home town is durban:)
ham So lets make it saturday or monday as per convenience.
ham Hey... What time is your driving on fri? We go for evaluation on fri?
spam 449050000301 You have won a £2,000 price! To claim, call 09050000301.
ham I'm going 4 lunch now wif my family then aft dat i go str 2 orchard lor.
spam Bored of speed dating? Try SPEEDCHAT, txt SPEEDCHAT to 80155, if you don't like em txt SWAP and get a new chatter! Chat80155 POBox36504W45WQ 150p/msg rcd 16
ham Cancel cheyyamo?and get some money back?
spam Do you want 750 anytime any network mins 150 text and a NEW video phone for only five pounds per week call 08000776320 now or reply for delivery Tomorrow
ham Ok.ok ok..then..whats ur todays plan
ham Good morning princess! How are you?
ham Aiyar sorry lor forgot 2 tell u...
spam For taking part in our mobile survey yesterday! You can now have 500 texts 2 use however you wish. 2 get txts just send TXT to 80160 T&C www.txt43.com 1.50p
ham Not tonight mate. Catching up on some sleep. This is my new number by the way.
ham Height of "Oh shit....!!" situation: A guy throws a luv letter on a gal but falls on her brothers head whos a gay,.;-):-D
spam Ur HMV Quiz cash-balance is currently £500 - to maximize ur cash-in now send HMV1 to 86688 only 150p/msg
ham So check your errors and if you had difficulties, do correction.
ham Howz pain?hope u r fine..
ham Sorry, I'll call later
ham Good morning princess! How are you?
ham As I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there.
ham U wake up already? Thanx 4 e tau sar piah it's quite nice.
ham K do I need a login or anything
spam Dont forget you can place as many FREE Requests with 1stchoice.co.uk as you wish. For more Information call 08707808226.
ham LOL ... No just was busy
ham What * u wearing?
ham Message:some text missing* Sender:Name Missing* *Number Missing *Sent:Date missing *Missing U a lot thats y everything is missing sent via fullonsms.com
ham Oh:)as usual vijay film or its different?
spam I don't know u and u don't know me. Send CHAT to 86688 now and let's find each other! Only 150p/Msg rcvd. HG/Suite342/2Lands/Row/W1J6HL LDN. 18 years or over.
ham Have you had a good day? Mine was really busy are you up to much tomorrow night?
ham And is there a way you can send shade's stuff to her. And she has been wonderful too.
ham Really... I tot ur paper ended long ago... But wat u copied jus now got use? U happy lar... I still haf 2 study :-(
spam Thank you, winner notified by sms. Good Luck! No future marketing reply STOP to 84122 customer services 08450542832
ham Babe ? I lost you ... :-(
ham Ok... Help me ask if she's working tmr a not?
ham I'm not driving... Raining! Then i'll get caught at e mrt station lor.
ham Not a drop in the tank
ham (That said can you text him one more time?)
ham Sorry, I'll call later
ham Ok i go change also...
spam 1000's of girls many local 2 u who r virgins 2 this & r ready 2 4fil ur every sexual need. Can u 4fil theirs? text CUTE to 69911(£1.50p. m)
ham Did u find a sitter for kaitlyn? I was sick and slept all day yesterday.
ham Sorry man, accidentally left my phone on silent last night and didn't check it til I got up
ham Hey.. Something came up last min.. Think i wun be signing up tmr.. Hee
ham He's an adult and would learn from the experience. There's no real danger. I just dont like peeps using drugs they dont need. But no comment
ham Hey! There's veggie pizza... :/
ham Yun buying... But school got offer 2000 plus only...
ham You sure your neighbors didnt pick it up
ham K. I will sent it again
spam Free entry in 2 a wkly comp to win FA Cup final tkts 21st May 2005. Text FA to 87121 to receive entry question(std txt rate)T&C's apply 08452810075over18's
ham New Theory: Argument wins d SITUATION, but loses the PERSON. So dont argue with ur friends just.. . . . kick them & say, I'm always correct.!
ham Well. Im computerless. Time to make some oreo truffles
ham Haha yeah I see that now, be there in a sec
ham I am not having her number sir
ham Lol now I'm after that hot air balloon!
ham Ok . . now i am in bus. . If i come soon i will come otherwise tomorrow
ham Msgs r not time pass.They silently say that I am thinking of U right now and also making U think of me at least 4 a moment. Gd nt.swt drms @Shesil
ham Yeah, we can probably swing by once my roommate finishes up with his girl
spam Got what it takes 2 take part in the WRC Rally in Oz? U can with Lucozade Energy! Text RALLY LE to 61200 (25p), see packs or lucozade.co.uk/wrc & itcould be u!
ham Happy new years melody!
ham Ü dun need to pick ur gf?
ham Yay! You better not have told that to 5 other girls either.
ham Horrible u eat macs eat until u forgot abt me already rite... U take so long 2 reply. I thk it's more toot than b4 so b prepared. Now wat shall i eat?
ham Did he say how fantastic I am by any chance, or anything need a bigger life lift as losing the will 2 live, do you think I would be the first person 2 die from N V Q?
ham Just nw i came to hme da..
ham I'm outside islands, head towards hard rock and you'll run into me
ham To day class is there are no class.
ham I'm in chennai velachery:)
ham You flippin your shit yet?
ham K give me a sec, breaking a <#> at cstore
ham Am i that much bad to avoid like this?
ham Yo, you around? Just got my car back
ham Annoying isn't it.
ham Goodmorning, Today i am late for <#> min.
ham There's no point hangin on to mr not right if he's not makin u happy
ham All will come alive.better correct any good looking figure there itself..
ham In that case I guess I'll see you at campus lodge
ham We're done...
ham Come to my home for one last time i wont do anything. Trust me.
ham I was up all night too worrying about this appt. It's a shame we missed a girls night out with quizzes popcorn and you doing my hair.
spam Sex up ur mobile with a FREE sexy pic of Jordan! Just text BABE to 88600. Then every wk get a sexy celeb! PocketBabe.co.uk 4 more pics. 16 £3/wk 087016248
ham Ok... C ya...
spam You have 1 new voicemail. Please call 08719181503
ham What he said is not the matter. My mind saying some other matter is there.
ham He also knows about lunch menu only da. . I know
ham Al he does is moan at me if n e thin goes wrong its my fault&al de arguments r my fault&fed up of him of himso y bother? Hav 2go, thanx.xx
ham NEFT Transaction with reference number <#> for Rs. <DECIMAL> has been credited to the beneficiary account on <#> at <TIME> : <#>
ham Otherwise had part time job na-tuition..
ham I know she called me
ham Me also da, i feel yesterday night wait til 2day night dear.
ham Thanks for understanding. I've been trying to tell sura that.
spam WIN a year supply of CDs 4 a store of ur choice worth £500 & enter our £100 Weekly draw txt MUSIC to 87066 Ts&Cs www.Ldew.com.subs16+1win150ppmx3
ham The whole car appreciated the last two! Dad and are having a map reading semi argument but apart from that things are going ok. P.
spam As a SIM subscriber, you are selected to receive a Bonus! Get it delivered to your door, Txt the word OK to No: 88600 to claim. 150p/msg, EXP. 30Apr
ham I need you to be in my strong arms...
ham Also maaaan are you missing out
ham His bday real is in april .
ham Guessin you ain't gonna be here before 9?
ham Ok then i will come to ur home after half an hour
ham Yo, the game almost over? Want to go to walmart soon
ham Yeah, probably but not sure. Ilol let u know, but personally I wuldnt bother, then again if ur goin to then I mite as well!!
ham I'll text now! All creepy like so he won't think that we forgot
ham that would be good … I'll phone you tomo lunchtime, shall I, to organise something?
spam You have 1 new voicemail. Please call 08719181513.
ham Damn, can you make it tonight or do you want to just wait til tomorrow
ham K..k..i'm also fine:)when will you complete the course?
ham True. It is passable. And if you get a high score and apply for phd, you get 5years of salary. So it makes life easier.
spam No. 1 Nokia Tone 4 ur mob every week! Just txt NOK to 87021. 1st Tone FREE ! so get txtin now and tell ur friends. 150p/tone. 16 reply HL 4info
ham Prakesh is there know.
ham Teach me apps da. When you come to college.
ham Rofl betta invest in some anti aging products
spam You are a winner U have been specially selected 2 receive £1000 cash or a 4* holiday (flights inc) speak to a live operator 2 claim 0871277810810
ham sir, you will receive the account no another 1hr time. Sorry for the delay.
spam Reply with your name and address and YOU WILL RECEIVE BY POST a weeks completely free accommodation at various global locations www.phb1.com ph:08700435505150p
ham So ü'll be submitting da project tmr rite?
spam FREE entry into our £250 weekly comp just send the word ENTER to 84128 NOW. 18 T&C www.textcomp.com cust care 08712405020.
ham Jus ans me lar. U'll noe later.
ham I want to send something that can sell fast. <#> k is not easy money.
ham have got * few things to do. may be in * pub later.
ham 1's finish meeting call me.
ham Lol ok. I'll snatch her purse too.
ham "Hello-/@drivby-:0quit edrunk sorry iff pthis makes no senrd-dnot no how ^ dancce 2 drum n basq!ihave fun 2nhite x ros xxxxxxx"
ham Your opinion about me? 1. Over 2. Jada 3. Kusruthi 4. Lovable 5. Silent 6. Spl character 7. Not matured 8. Stylish 9. Simple Pls reply..
ham How much are we getting?
ham Is ur paper in e morn or aft tmr?
ham Dear relieved of westonzoyland, all going to plan this end too!
ham Hope you are having a great new semester. Do wish you the very best. You are made for greatness.
ham Oh yes I can speak txt 2 u no! Hmm. Did u get email?
ham I want to show you the world, princess :) how about europe?
ham Nobody can decide where to eat and dad wants Chinese
ham No shoot me. I'm in the docs waiting room. :/
ham Now? I'm going out 4 dinner soon..
ham Hello which the site to download songs its urgent pls
ham I do know what u mean, is the king of not havin credit! I'm goin2bed now. Night night sweet! Only1more sleep!
ham Horrible gal. Me in sch doing some stuff. How come u got mc?
ham HI HUN! IM NOT COMIN 2NITE-TELL EVERY1 IM SORRY 4 ME, HOPE U AVA GOODTIME!OLI RANG MELNITE IFINK IT MITE B SORTED,BUT IL EXPLAIN EVERYTHIN ON MON.L8RS.x
ham I call you later, don't have network. If urgnt, sms me.
ham Ummmmmaah Many many happy returns of d day my dear sweet heart.. HAPPY BIRTHDAY dear
spam Please CALL 08712402779 immediately as there is an urgent message waiting for you
ham Yeah like if it goes like it did with my friends imma flip my shit in like half an hour
ham Mum say we wan to go then go... Then she can shun bian watch da glass exhibition...
ham What your plan for pongal?
ham Just wait till end of march when el nino gets himself. Oh.
ham Not yet chikku..going to room nw, i'm in bus..
ham Am also doing in cbe only. But have to pay.
ham Honey boo I'm missing u.
ham We have sent JD for Customer Service cum Accounts Executive to ur mail id, For details contact us
ham Yo, I'm at my parents' gettin cash. Good news: we picked up a downstem
ham Thank you so much. When we skyped wit kz and sura, we didnt get the pleasure of your company. Hope you are good. We've given you ultimatum oh! We are countin down to aburo. Enjoy!
spam Hungry gay guys feeling hungry and up 4 it, now. Call 08718730555 just 10p/min. To stop texts call 08712460324 (10p/min)
ham Ok. No wahala. Just remember that a friend in need ...
ham I will see in half an hour
ham Im in inperialmusic listening2the weirdest track ever byleafcutter john-sounds like insects being molested&someone plumbing,remixed by evil men on acid!
ham "Hey sorry I didntgive ya a a bellearlier hunny,just been in bedbut mite go 2 thepub l8tr if uwana mt up?loads a luv Jenxxx."
ham SERIOUSLY. TELL HER THOSE EXACT WORDS RIGHT NOW.
spam Can U get 2 phone NOW? I wanna chat 2 set up meet Call me NOW on 09096102316 U can cum here 2moro Luv JANE xx Calls£1/minmoremobsEMSPOBox45PO139WA
ham Tee hee. Off to lecture, cheery bye bye.
ham Sorry chikku, my cell got some problem thts y i was nt able to reply u or msg u..
ham If you still havent collected the dough pls let me know so i can go to the place i sent it to get the control number
ham Ok...
spam network operator. The service is free. For T & C's visit 80488.biz
ham Let me know how to contact you. I've you settled in a room. Lets know you are ok.
ham Wot u up 2 u weirdo?
ham Can do lor...
ham Dont put your phone on silent mode ok
ham Can i meet ü at 5.. As 4 where depends on where ü wan 2 in lor..
ham Waiting 4 my tv show 2 start lor... U leh still busy doing ur report?
ham Oh ho. Is this the first time u use these type of words
ham Am I the only one who doesn't stalk profiles?
ham Ever green quote ever told by Jerry in cartoon "A Person Who Irritates u Always Is the one Who Loves u Vry Much But Fails to Express It...!..!! :-) :-) gud nyt
ham Yes i thought so. Thanks.
ham But if she.s drinkin i'm ok.
ham Just wondering, the others just took off
ham Night has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng
ham What do you do, my dog ? Must I always wait till the end of your day to have word from you ? Did you run out of time on your cell already?
ham Happy new year to u too!
ham Hey...Great deal...Farm tour 9am to 5pm $95/pax, $50 deposit by 16 May
ham Eat jap done oso aft ur lect wat... Ü got lect at 12 rite...
ham Hey babe! I saw you came online for a second and then you disappeared, what happened ?
ham Da my birthdate in certificate is in april but real date is today. But dont publish it. I shall give you a special treat if you keep the secret. Any way thanks for the wishes
ham Happy birthday... May all ur dreams come true...
ham Aiyah u did ok already lar. E nydc at wheellock?
ham TELL HER I SAID EAT SHIT.
ham Sure! I am driving but will reach my destination soon.
ham K so am I, how much for an 8th? Fifty?
ham Your daily text from me – a favour this time
ham Great to hear you are settling well. So what's happenin wit ola?
ham Those cocksuckers. If it makes you feel better ipads are worthless garbage novelty items and you should feel bad for even wanting one
ham I tot u reach liao. He said t-shirt.
ham FRAN I DECIDED 2 GO N E WAY IM COMPLETELY BROKE AN KNACKERED I GOT UP BOUT 3 C U 2MRW LOVE JANX P.S THIS IS MY DADS FONE, -NO CREDIT
ham I cant pick the phone right now. Pls send a message
ham Your right! I'll make the appointment right now.
ham Designation is software developer and may be she get chennai:)
spam Enjoy the jamster videosound gold club with your credits for 2 new videosounds+2 logos+musicnews! get more fun from jamster.co.uk! 16+only Help? call: 09701213186
spam Get 3 Lions England tone, reply lionm 4 mono or lionp 4 poly. 4 more go 2 www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply
ham I jokin oni lar.. Ü busy then i wun disturb ü.
ham Ok, be careful ! Don't text and drive !
ham I.ll always be there, even if its just in spirit. I.ll get a bb soon. Just trying to be sure i need it.
ham U r too much close to my heart. If u go away i will be shattered. Plz stay with me.
ham I love u 2 babe! R u sure everything is alrite. Is he being an idiot? Txt bak girlie
ham How abt making some of the pics bigger?
ham Got but got 2 colours lor. One colour is quite light n e other is darker lor. Actually i'm done she's styling my hair now.
ham Whenevr ur sad, Whenevr ur gray, Remembr im here 2 listn 2 watevr u wanna say, Jus walk wid me a little while,& I promise I'll bring back ur smile.:-)
ham Why nothing. Ok anyway give me treat
spam Win the newest “Harry Potter and the Order of the Phoenix (Book 5) reply HARRY, answer 5 questions - chance to be the first among readers!
ham Ok...
ham Correct. So how was work today
ham Just sent again. Do you scream and moan in bed, princess?
ham I wake up long ago already... Dunno, what other thing?
ham Oh just getting even with u.... u?
ham I thk 50 shd be ok he said plus minus 10.. Did ü leave a line in between paragraphs?
ham Can you call me plz. Your number shows out of coveragd area. I have urgnt call in vasai & have to reach before 4'o clock so call me plz
ham Yeah jay's sort of a fucking retard
ham Sorry, was in the bathroom, sup
spam Ur balance is now £500. Ur next question is: Who sang 'Uptown Girl' in the 80's ? 2 answer txt ur ANSWER to 83600. Good luck!
ham My exam is for february 4. Wish you a great day.
ham I dont know what to do to come out of this so only am ask questions like this dont mistake me.
ham Aight no rush, I'll ask jay
ham Good Morning plz call me sir
ham It's ok lar. U sleep early too... Nite...
ham Oh... Icic... K lor, den meet other day...
ham Oh ! A half hour is much longer in Syria than Canada, eh ? Wow you must get SO much more work done in a day than us with all that extra time ! *grins*
ham Sometimes we put walls around our hearts,not just to be safe from getting hurt.. But to find out who cares enough to break the walls & get closer.. GOODNOON:)
ham Sweet, we may or may not go to 4U to meet carlos so gauge patty's interest in that
ham Then she buying today? Ü no need to c meh...
ham Aight sorry I take ten years to shower. What's the plan?
ham Every monday..nxt week vl be completing..
ham Might ax well im there.
ham Just chill for another 6hrs. If you could sleep the pain is not a surgical emergency so see how it unfolds. Okay
ham Yeah I'll try to scrounge something up
ham Crazy ar he's married. Ü like gd looking guys not me. My frens like say he's korean leona's fave but i dun thk he is. Aft some thinking mayb most prob i'll go.
ham Were somewhere on Fredericksburg
ham Que pases un buen tiempo or something like that
ham Is it ok if I stay the night here? Xavier has a sleeping bag and I'm getting tired
ham She doesnt need any test.
ham Nothing much, chillin at home. Any super bowl plan?
spam FREE2DAY sexy St George's Day pic of Jordan!Txt PIC to 89080 dont miss out, then every wk a saucy celeb!4 more pics c PocketBabe.co.uk 0870241182716 £3/wk
ham Bugis oso near wat...
ham Yo theres no class tmrw right?
ham Let Ur Heart Be Ur Compass Ur Mind Ur Map Ur Soul Ur Guide And U Will Never loose in world....gnun - Sent via WAY2SMS.COM
ham Goodnight, sleep well da please take care pa. Please.
ham Baaaaabe! I misss youuuuu ! Where are you ? I have to go and teach my class at 5 ...
ham Convey my regards to him
ham U ned to convince him tht its not possible witot hurting his feeling its the main
ham Good afternoon loverboy ! How goes you day ? Any luck come your way? I think of you, sweetie and send my love across the sea to make you smile and happy
ham If i start sending blackberry torch to nigeria will you find buyer for me?like 4a month. And tell dad not to buy bb from anyone oh.
ham <#> %of pple marry with their lovers... becz they hav gud undrstndng dat avoids problems. i sent dis 2 u, u wil get gud news on friday by d person you like. And tomorrow will be the best day of your life. Dont break this chain. If you break you will suffer. send this to <#> frnds in <#> mins whn u read...
ham Yo dude guess who just got arrested the other day
ham Shuhui say change 2 suntec steamboat? U noe where? Where r u now?
ham What does the dance river do?
ham Yetunde, i'm sorry but moji and i seem too busy to be able to go shopping. Can you just please find some other way to get what you wanted us to get. Please forgive me. You can reply free via yahoo messenger.
ham Hey i will be really pretty late... You want to go for the lesson first? I will join you. I'm only reaching tp mrt
spam HOT LIVE FANTASIES call now 08707509020 Just 20p per min NTT Ltd, PO Box 1327 Croydon CR9 5WB 0870..k
ham Bbq this sat at mine from 6ish. Ur welcome 2 come
ham I don't know, same thing that's wrong everyso often, he panicks starts goin on bout not bein good enough …
ham Alright. I'm out--have a good night!
ham Did you try making another butt.
ham Hope you are feeling great. Pls fill me in. Abiola
ham I though we shd go out n have some fun so bar in town or something – sound ok?
ham 1) Go to write msg 2) Put on Dictionary mode 3)Cover the screen with hand, 4)Press <#> . 5)Gently remove Ur hand.. Its interesting..:)
spam Bears Pic Nick, and Tom, Pete and ... Dick. In fact, all types try gay chat with photo upload call 08718730666 (10p/min). 2 stop texts call 08712460324
spam 500 New Mobiles from 2004, MUST GO! Txt: NOKIA to No: 89545 & collect yours today!From ONLY £1 www.4-tc.biz 2optout 087187262701.50gbp/mtmsg18 TXTAUCTION
ham We're finally ready fyi
ham Auntie huai juan never pick up her phone
spam Double Mins & Double Txt & 1/2 price Linerental on Latest Orange Bluetooth mobiles. Call MobileUpd8 for the very latest offers. 08000839402 or call2optout/LF56
ham Ya tel, wats ur problem..
spam No. 1 Nokia Tone 4 ur mob every week! Just txt NOK to 87021. 1st Tone FREE ! so get txtin now and tell ur friends. 150p/tone. 16 reply HL 4info
ham i dnt wnt to tlk wid u
ham We spend our days waiting for the ideal path to appear in front of us.. But what we forget is.. "paths are made by walking.. not by waiting.." Goodnight!
ham Its ok my arm is feeling weak cuz i got a shot so we can go another time
ham Please reserve ticket on saturday eve from chennai to thirunelvali and again from tirunelvali to chennai on sunday eve...i already see in net..no ticket available..i want to book ticket through tackle ..
ham Storming msg: Wen u lift d phne, u say "HELLO" Do u knw wt is d real meaning of HELLO?? . . . It's d name of a girl..! . . . Yes.. And u knw who is dat girl?? "Margaret Hello" She is d girlfrnd f Grahmbell who invnted telphone... . . . . Moral:One can 4get d name of a person, bt not his girlfrnd... G o o d n i g h t . . .@
ham That's ok. I popped in to ask bout something and she said you'd been in. Are you around tonght wen this girl comes?
ham All e best 4 ur exam later.
ham Hope ur head doesn't hurt 2 much ! Am ploughing my way through a pile of ironing ! Staying in with a chinky tonight come round if you like.
ham Oh k.i think most of wi and nz players unsold.
ham Haha... Where got so fast lose weight, thk muz go 4 a month den got effect... Gee,later we go aust put bk e weight.
ham I wonder how you got online, my love ? Had you gone to the net cafe ? Did you get your phone recharged ? Were you on a friends net ? I think of you, boytoy
ham Haha just kidding, papa needs drugs
ham Thk shld b can... Ya, i wana go 4 lessons... Haha, can go for one whole stretch...
ham Oh ok..
ham R we still meeting 4 dinner tonight?
ham Thats cool! I am a gentleman and will treat you with dignity and respect.
ham Shall i start from hear.
ham Then we wait 4 u lor... No need 2 feel bad lar...
ham No did you check? I got his detailed message now
ham You have registered Sinco as Payee. Log in at icicibank.com and enter URN <#> to confirm. Beware of frauds. Do NOT share or disclose URN to anyone.
ham No, I decided that only people who care about stuff vote and caring about stuff is for losers
ham Kaiez... Enjoy ur tuition... Gee... Thk e second option sounds beta... I'll go yan jiu den msg u...
ham You have registered Sinco as Payee. Log in at icicibank.com and enter URN <#> to confirm. Beware of frauds. Do NOT share or disclose URN to anyone.
ham cool. We will have fun practicing making babies!
ham Actually getting ready to leave the house.
ham K..k..any special today?
spam URGENT, IMPORTANT INFORMATION FOR O2 USER. TODAY IS YOUR LUCKY DAY! 2 FIND OUT WHY LOG ONTO HTTP://WWW.URAWINNER.COM THERE IS A FANTASTIC SURPRISE AWAITING FOR YOU
ham Then we gotta do it after that
ham I've got ten bucks, jay is being noncomittal
ham Where at were hungry too
ham Pls speak to that customer machan.
ham somewhere out there beneath the pale moon light someone think in of u some where out there where dreams come true... goodnite & sweet dreams
ham Wen ur lovable bcums angry wid u, dnt take it seriously.. Coz being angry is d most childish n true way of showing deep affection, care n luv!.. kettoda manda... Have nice day da.
spam Dear U've been invited to XCHAT. This is our final attempt to contact u! Txt CHAT to 86688 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18 yrs
ham So wats ur opinion abt him and how abt is character?
ham Jay is snickering and tells me that x is totally fucking up the chords as we speak
ham No..few hours before.went to hair cut .
ham No wonder... Cos i dun rem seeing a silver car... But i thk i saw a black one...
ham Lmao. Take a pic and send it to me.
ham "Speak only when you feel your words are better than the silence..." Gud mrng:-)
ham No. She's currently in scotland for that.
ham Do you work all this week ?
spam Congratulations ur awarded either £500 of CD gift vouchers & Free entry 2 our £100 weekly draw txt MUSIC to 87066 TnCs www.Ldew.com 1 win150ppmx3age16
ham Lol great now im getting hungry.
ham Yes.. now only saw your message..
ham I'll be at mu in like <#> seconds
ham Ok...
ham THING R GOOD THANX GOT EXAMS IN MARCH IVE DONE NO REVISION? IS FRAN STILL WITH BOYF? IVE GOTTA INTERVIW 4 EXETER BIT WORRIED!x
ham Tell you what, if you make a little spreadsheet and track whose idea it was to smoke to determine who "smokes too much" for the entire month of february, I'll come up
spam For sale - arsenal dartboard. Good condition but no doubles or trebles!
ham Don't look back at the building because you have no coat and i don't want you to get more sick. Just hurry home and wear a coat to the gym!!!
ham My painful personal thought- "I always try to keep everybody happy all the time. But nobody recognises me when i am alone"
ham Thanks for ve lovely wisheds. You rock
ham You intrepid duo you! Have a great time and see you both soon.
ham I asked sen to come chennai and search for job.
ham Dad went out oredi...
ham I jus hope its true that missin me cos i'm really missin him! You haven't done anything to feel guilty about, yet.
ham Wat so late still early mah. Or we juz go 4 dinner lor. Aiya i dunno...
ham Arms fine, how's Cardiff and uni?
ham In fact when do you leave? I think addie goes back to school tues or wed
ham Cool breeze... Bright sun... Fresh flower... Twittering birds... All these waiting to wish u: "GOODMORNING & HAVE A NICE DAY" :)
ham Ya:)going for restaurant..
ham Its ok., i just askd did u knw tht no?
spam Free 1st week entry 2 TEXTPOD 4 a chance 2 win 40GB iPod or £250 cash every wk. Txt POD to 84128 Ts&Cs www.textpod.net custcare 08712405020.
ham Those ducking chinchillas
ham I am in a marriage function
ham Looks like u wil b getting a headstart im leaving here bout 2.30ish but if u r desperate for my company I could head in earlier-we were goin to meet in rummer.
ham Don‘t give a flying monkeys wot they think and I certainly don‘t mind. Any friend of mine and all that!
spam As a registered optin subscriber ur draw 4 £100 gift voucher will be entered on receipt of a correct ans to 80062 Whats No1 in the BBC charts
ham say thanks2.
ham Msg me when rajini comes.
ham Ya! when are ü taking ure practical lessons? I start in june..
ham That's good, because I need drugs
ham Stupid.its not possible
ham Can ü all decide faster cos my sis going home liao..
spam Summers finally here! Fancy a chat or flirt with sexy singles in yr area? To get MATCHED up just reply SUMMER now. Free 2 Join. OptOut txt STOP Help08714742804
ham U sleeping now.. Or you going to take? Haha.. I got spys wat.. Me online checking n replying mails lor..
spam CLAIRE here am havin borin time & am now alone U wanna cum over 2nite? Chat now 09099725823 hope 2 C U Luv CLAIRE xx Calls£1/minmoremobsEMSPOBox45PO139WA
ham Fighting with the world is easy, u either win or lose bt fightng with some1 who is close to u is dificult if u lose - u lose if u win - u still lose.
spam Bought one ringtone and now getting texts costing 3 pound offering more tones etc
ham Yalru lyfu astne chikku.. Bt innu mundhe lyf ali halla ke bilo (marriage)program edhae, so lyf is nt yet ovr chikku..ali vargu lyfu meow meow:-D
ham Kinda. First one gets in at twelve! Aah. Speak tomo
spam 09066362231 URGENT! Your mobile No 07xxxxxxxxx won a £2,000 bonus caller prize on 02/06/03! this is the 2nd attempt to reach YOU! call 09066362231 ASAP!
ham Ok good then i later come find ü... C lucky i told ü to go earlier... Later pple take finish ü no more again...
ham Wat makes u thk i'll fall down. But actually i thk i'm quite prone 2 falls. Lucky my dad at home i ask him come n fetch me already.
spam YOU 07801543489 are guaranteed the latests Nokia Phone, a 40GB iPod MP3 player or a £500 prize! Txt word:COLLECT to No:83355! TC-LLC NY-USA 150p/Mt msgrcvd18+
ham Your account has been refilled successfully by INR <DECIMAL> . Your KeralaCircle prepaid account balance is Rs <DECIMAL> . Your Transaction ID is KR <#> .
ham I wont touch you with out your permission.
spam Hi its LUCY Hubby at meetins all day Fri & I will B alone at hotel U fancy cumin over? Pls leave msg 2day 09099726395 Lucy x Calls£1/minMobsmoreLKPOBOX177HP51FL
ham 7 wonders in My WORLD 7th You 6th Ur style 5th Ur smile 4th Ur Personality 3rd Ur Nature 2nd Ur SMS and 1st "Ur Lovely Friendship"... good morning dear
ham Take some small dose tablet for fever
ham Oh. U must have taken your REAL Valentine out shopping first.
ham Just sent you an email – to an address with incomm in it, is that right?
ham Will do, you gonna be at blake's all night? I might be able to get out of here a little early
ham Friendship is not a game to play, It is not a word to say, It doesn\'t start on March and ends on May, It is tomorrow, yesterday, today and e
ham Nice. Wait...should you be texting right now? I'm not gonna pay your ticket, ya know!
ham I'm watching lotr w my sis dis aft. So u wan 2 meet me 4 dinner at nite a not?
ham Why you keeping me away like this
ham I think its far more than that but find out. Check google maps for a place from your dorm.
ham My trip was ok but quite tiring lor. Uni starts today but it's ok 4 me cos i'm not taking any modules but jus concentrating on my final yr project.
ham Have you always been saying welp?
ham I'm a guy, browsin is compulsory
ham Ok...
ham Purity of friendship between two is not about smiling after reading the forwarded message..Its about smiling just by seeing the name. Gud evng musthu
ham Sorry, I'll call later
ham (I should add that I don't really care and if you can't I can at least get this dude to fuck off but hey, your money if you want it)
ham Hello lover! How goes that new job? Are you there now? Are you happy? Do you think of me? I wake, my slave and send you a teasing kiss from across the sea
ham I told your number to gautham..
ham Tell them no need to investigate about me anywhere.
ham Ok i juz receive..
ham Cant believe i said so many things to you this morning when all i really wanted to say was good morning, i love you! Have a beautiful morning. See you in the library later.
spam Your account has been credited with 500 FREE Text Messages. To activate, just txt the word: CREDIT to No: 80488 T&Cs www.80488.biz
ham In the end she might still vomit but its okay. Not everything will come out.
ham How are you with moneY...as in to you...money aint a thing....how are you sha!
ham It has everything to do with the weather. Keep extra warm. Its a cold but nothing serious. Pls lots of vitamin c
ham Hey gals.. Anyone of u going down to e driving centre tmr?
ham I'm always on yahoo messenger now. Just send the message to me and i.ll get it you may have to send it in the mobile mode sha but i.ll get it. And will reply.
ham I'm putting it on now. It should be ready for <TIME>
ham Time n Smile r the two crucial things in our life. Sometimes time makes us to forget smile, and sometimes someone's smile makes us to forget time gud noon
spam SMS. ac JSco: Energy is high, but u may not know where 2channel it. 2day ur leadership skills r strong. Psychic? Reply ANS w/question. End? Reply END JSCO
ham Host-based IDPS for linux systems.
spam HOT LIVE FANTASIES call now 08707509020 Just 20p per min NTT Ltd, PO Box 1327 Croydon CR9 5WB 0870 is a national rate call
ham Don no da:)whats you plan?
ham Ill be there on <#> ok.
ham Oh my God. I'm almost home
ham Total video converter free download type this in google search:)
spam Thanks for the Vote. Now sing along with the stars with Karaoke on your mobile. For a FREE link just reply with SING now.
ham Wen ur lovable bcums angry wid u, dnt take it seriously.. Coz being angry is d most childish n true way of showing deep affection, care n luv!.. kettoda manda... Have nice day da.
ham Sounds like something that someone testing me would sayy
ham When u love someone Dont make them to love u as much as u do. But Love them so much that they dont want to be loved by anyone except you... Gud nit.
ham Pete,is this your phone still? Its Jenny from college and Leanne.what are you up to now?:)
ham Oops sorry. Just to check that you don't mind picking me up tomo at half eight from station. Would that be ok?
ham Hey sweet, I was wondering when you had a moment if you might come to me ? I want to send a file to someone but it won't go over yahoo for them because their connection sucks, remember when you set up that page for me to go to and download the format disc ? Could you tell me how to do that ? Or do you know some other way to download big files ? Because they can download stuff directly from the internet. Any help would be great, my prey ... *teasing kiss*
ham Hows the champ just leaving glasgow!
ham K:)all the best:)congrats...
ham I wonder if you'll get this text?
ham I need to come home and give you some good lovin...
spam Our brand new mobile music service is now live. The free music player will arrive shortly. Just install on your phone to browse content from the top artists.
ham Shall i ask one thing if you dont mistake me.
ham Check wid corect speling i.e. Sarcasm
spam URGENT! Your Mobile No was awarded a £2,000 Bonus Caller Prize on 1/08/03! This is our 2nd attempt to contact YOU! Call 0871-4719-523 BOX95QU BT National Rate
ham Are you angry with me. What happen dear
ham I thk u dun haf 2 hint in e forum already lor... Cos i told ron n darren is going 2 tell shuhui.
ham Yup ok thanx...
ham Hi:)cts employee how are you?
ham Pls pls find out from aunt nike.
ham Wow ... I love you sooo much, you know ? I can barely stand it ! I wonder how your day goes and if you are well, my love ... I think of you and miss you
ham No screaming means shouting..
ham Hey what happen de. Are you alright.
ham Should I have picked up a receipt or something earlier
ham I think chennai well settled?
ham Oh dang! I didn't mean o send that to you! Lol!
ham Unfortunately i've just found out that we have to pick my sister up from the airport that evening so don't think i'll be going out at all. We should try to go out one of th
ham Horrible bf... I now v hungry...
ham Remember on that day..
spam You have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+
ham How's it feel? Mr. Your not my real Valentine just my yo Valentine even tho u hardly play!!
ham All sounds good. Fingers . Makes it difficult to type
ham Midnight at the earliest
ham You're not sure that I'm not trying to make xavier smoke because I don't want to smoke after being told I smoke too much?
ham K come to nordstrom when you're done
ham Do u konw waht is rael FRIENDSHIP Im gving yuo an exmpel: Jsut ese tihs msg.. Evrey splleing of tihs msg is wrnog.. Bt sitll yuo can raed it wihtuot ayn mitsake.. GOODNIGHT & HAVE A NICE SLEEP..SWEET DREAMS..
ham Now press conference da:)
spam Hello from Orange. For 1 month's free access to games, news and sport, plus 10 free texts and 20 photo messages, reply YES. Terms apply: www.orange.co.uk/ow
ham After completed degree. There is no use in joining finance.
ham Good afternoon, my love ! Any job prospects ? Are you missing me ? What do you do ? Are you being lazy and bleak, hmmm ? Or happy and filled with my love ?
ham Shant disturb u anymore... Jia you...
ham Bishan lar nearer... No need buy so early cos if buy now i gotta park my car...
ham Me, i dont know again oh
ham Dude sux for snake. He got old and raiden got buff
ham He says hi and to get your ass back to south tampa (preferably at a kegger)
ham In e msg jus now. U said thanks for gift.
ham U too...
ham Ok how you dear. Did you call chechi
ham Yeah we do totes. When u wanna?
ham Ok i found dis pierre cardin one which looks normal costs 20 its on sale.
ham Good sleep is about rhythm. The person has to establish a rhythm that the body will learn and use. If you want to know more :-)
ham Wat r u doing?
ham Message from . I am at Truro Hospital on ext. You can phone me here. as I have a phone by my side
ham Single line with a big meaning::::: "Miss anything 4 ur "Best Life" but, don't miss ur best life for anything... Gud nyt...
ham Just got some gas money, any chance you and the gang want to go on a grand nature adventure?
ham Dnt worry...use ice pieces in a cloth pack.also take 2 tablets.
ham Dude just saw a parked car with its sunroof popped up. Sux
ham Get ready to put on your excellent sub face :)
ham Tmrw. Im finishing 9 doors
ham The <#> g that i saw a few days ago, the guy wants sell wifi only for <#> and with 3g for <#> . That's why i blanked him.
ham I am late. I will be there at
ham whatever, im pretty pissed off.
ham Today is ACCEPT DAY..U Accept me as? Brother Sister Lover Dear1 Best1 Clos1 Lvblefrnd Jstfrnd Cutefrnd Lifpartnr Belovd Swtheart Bstfrnd No rply means enemy
ham I dont have that much image in class.
ham No:-)i got rumour that you going to buy apartment in chennai:-)
ham Near kalainar tv office.thenampet
spam Ur cash-balance is currently 500 pounds - to maximize ur cash-in now send GO to 86688 only 150p/msg. CC 08718720201 HG/Suite342/2Lands Row/W1J6HL
spam SMS AUCTION - A BRAND NEW Nokia 7250 is up 4 auction today! Auction is FREE 2 join & take part! Txt NOKIA to 86021 now! HG/Suite342/2Lands Row/W1J6HL
ham My sis is catching e show in e afternoon so i'm not watching w her. So c u wan 2 watch today or tmr lor.
ham Sounds gd... Haha... Can... Wah, u yan jiu so fast liao...
ham No. To be nosy I guess. Idk am I over reacting if I'm freaked?
ham Remember all those whom i hurt during days of satanic imposter in me.need to pay a price,so be it.may destiny keep me going and as u said pray that i get the mind to get over the same.
ham How to Make a girl Happy? It's not at all difficult to make girls happy. U only need to be... 1. A friend 2. Companion 3. Lover 4. Chef . . . <#> . Good listener <#> . Organizer <#> . Good boyfriend <#> . Very clean <#> . Sympathetic <#> . Athletic <#> . Warm . . . <#> . Courageous <#> . Determined <#> . True <#> . Dependable <#> . Intelligent . . . <#> . Psychologist <#> . Pest exterminator <#> . Psychiatrist <#> . Healer . . <#> . Stylist <#> . Driver . . Aaniye pudunga venaam..
ham Why is that, princess? I bet the brothas are all chasing you!
ham I shall book chez jules for half eight, if that's ok with you?
ham Hhahhaahahah rofl wtf nig was leonardo in your room or something
ham Yep, at derek's house now, see you Sunday <3
ham It's cool, let me know before it kicks off around <#> , I'll be out and about all day
ham Sorry, I'll call later
ham I was wondering if it would be okay for you to call uncle john and let him know that things are not the same in nigeria as they r here. That <#> dollars is 2years sent and that you know its a strain but i plan to pay back every dime he gives. Every dime so for me to expect anything from you is not practical. Something like that.
ham There are no other charges after transfer charges and you can withdraw anyhow you like
ham Dont search love, let love find U. Thats why its called falling in love, bcoz U dont force yourself, U just fall and U know there is smeone to hold U... BSLVYL
ham At 4. Let's go to bill millers
ham I love you. You set my soul on fire. It is not just a spark. But it is a flame. A big rawring flame. XoXo
ham Somewhr someone is surely made 4 u. And God has decided a perfect time to make u meet dat person. . . . till den, . . . . . Enjoy ur crushes..!!!;-)
ham That's my honeymoon outfit. :)
ham Will it help if we propose going back again tomorrow
spam PRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S. I. M. points. Call 08719899230 Identifier Code: 41685 Expires 07/11/04
ham Never blame a day in ur life. Good days give u happiness. Bad days give u experience. Both are essential in life! All are Gods blessings! good morning.:
ham Pls confirm the time to collect the cheque.
spam As a Registered Subscriber yr draw 4 a £100 gift voucher will b entered on receipt of a correct ans. When are the next olympics. Txt ans to 80062
spam URGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09061790121 from land line. Claim 3030. Valid 12hrs only 150ppm
ham Daddy will take good care of you :)
ham Yeah probably, I still gotta check out with leo
ham K.then any other special?
ham Carlos is taking his sweet time as usual so let me know when you and patty are done/want to smoke and I'll tell him to haul ass
ham Ok pa. Nothing problem:-)
ham Have you heard about that job? I'm going to that wildlife talk again tonight if u want2come. Its that2worzels and a wizzle or whatever it is?!
ham God picked up a flower and dippeditinaDEW, lovingly touched itwhichturnedinto u, and the he gifted tomeandsaid,THIS FRIEND IS 4U
ham When you came to hostel.
ham Ok no prob... I'll come after lunch then...
ham Jus telling u dat i'll b leaving 4 shanghai on 21st instead so we'll haf more time 2 meet up cya...
ham Are your freezing ? Are you home yet ? Will you remember to kiss your mom in the morning? Do you love me ? Do you think of me ? Are you missing me yet ?
ham You all ready for * big day tomorrow?
ham I'll probably be around mu a lot
ham 645
spam RT-KIng Pro Video Club>> Need help? [email protected] or call 08701237397 You must be 16+ Club credits redeemable at www.ringtoneking.co.uk! Enjoy!
ham Thnx dude. u guys out 2nite?
ham Me sef dey laugh you. Meanwhile how's my darling anjie!
ham Mm i had my food da from out
ham K, makes sense, btw carlos is being difficult so you guys are gonna smoke while I go pick up the second batch and get gas
ham Did u download the fring app?
ham The 2 oz guy is being kinda flaky but one friend is interested in picking up $ <#> worth tonight if possible
ham Friends that u can stay on fb chat with
ham Fuck babe, I miss you sooooo much !! I wish you were here to sleep with me ... My bed is so lonely ... I go now, to sleep ... To dream of you, my love ...
ham Living is very simple.. Loving is also simple.. Laughing is too simple.. Winning is tooo simple.. But, being 'SIMPLE' is very difficult.. Gud nte.:-
spam U have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094599
ham Ah, well that confuses things, doesn‘t it?
spam 500 free text msgs. Just text ok to 80488 and we'll credit your account
ham Hi Dear Call me its urgnt. I don't know whats your problem. You don't want to work or if you have any other problem at least tell me. Wating for your reply.
ham Dear how you. Are you ok?
spam You have been selected to stay in 1 of 250 top British hotels - FOR NOTHING! Holiday Worth £350! To Claim, Call London 02072069400. Bx 526, SW73SS
ham Yes princess! I want to make you happy...
ham Sounds like you have many talents! would you like to go on a dinner date next week?
ham I am going to film 2day da. At 6pm. Sorry da.
ham We not watching movie already. Xy wants 2 shop so i'm shopping w her now.
ham Hello my little party animal! I just thought I'd buzz you as you were with your friends ...*grins*... Reminding you were loved and send a naughty adoring kiss
ham Yesterday its with me only . Now am going home.
spam Eerie Nokia tones 4u, rply TONE TITLE to 8007 eg TONE DRACULA to 8007 Titles: GHOST, ADDAMSFA, MUNSTERS, EXORCIST, TWILIGHT www.getzed.co.uk POBox36504W45WQ 150p
ham You have come into my life and brought the sun ..Shiny down on me, warming my heart. Putting a constant smile on my face ... Making me feel loved and cared for
ham No shit, but I wasn't that surprised, so I went and spent the evening with that french guy I met in town here and we fooled around a bit but I didn't let him fuck me
spam 0A$NETWORKS allow companies to bill for SMS, so they are responsible for their "suppliers", just as a shop has to give a guarantee on what they sell. B. G.
ham Great comedy..cant stop laughing da:)
spam FreeMsg:Feelin kinda lnly hope u like 2 keep me company! Jst got a cam moby wanna c my pic?Txt or reply DATE to 82242 Msg150p 2rcv Hlp 08712317606 stop to 82242
ham Alright, we're all set here, text the man
ham Hi , where are you? We're at and they're not keen to go out i kind of am but feel i shouldn't so can we go out tomo, don't mind do you?
ham Sleeping nt feeling well
ham U WILL SWITCH YOUR FONE ON DAMMIT!!
ham India have to take lead:)
ham I.ll post her out l8r. In class
ham Thts wat Wright Brother did to fly..
ham Evening * v good if somewhat event laden. Will fill you in, don't you worry … Head * ok but throat * wrecked. See you at six then!
ham If u laugh really loud.. If u talk spontaneously.. If u dont care what others feel.. U are probably with your dear & best friends.. GOODEVENING Dear..:)
ham ITS A LAPTOP TAKE IT WITH YOU.
ham I dont have any of your file in my bag..i was in work when you called me.i 'll tell you if i find anything in my room.
ham I wan but too early lei... Me outside now wun b home so early... Neva mind then...
spam For ur chance to win a £250 cash every wk TXT: ACTION to 80608. T's&C's www.movietrivia.tv custcare 08712405022, 1x150p/wk
ham I was at bugis juz now wat... But now i'm walking home oredi... Ü so late then reply... I oso saw a top dat i like but din buy... Where r ü now?
ham Wishing you and your family Merry "X" mas and HAPPY NEW Year in advance..
ham At 7 we will go ok na.
ham Yes I posted a couple of pics on fb. There's still snow outside too. I'm just waking up :)
ham S:-)if we have one good partnership going we will take lead:)
spam RGENT! This is the 2nd attempt to contact U!U have WON £1250 CALL 09071512433 b4 050703 T&CsBCM4235WC1N3XX. callcost 150ppm mobilesvary. max£7. 50
ham Yeah, where's your class at?
ham No just send to you. Bec you in temple na.
ham You aren't coming home between class, right? I need to work out and shower!
spam Hi if ur lookin 4 saucy daytime fun wiv busty married woman Am free all next week Chat now 2 sort time 09099726429 JANINExx Calls£1/minMobsmoreLKPOBOX177HP51FL
ham S but mostly not like that.
ham Ü v ma fan...
ham Dunno cos i was v late n when i reach they inside already... But we ate spageddies lor... It's e gals who r laughing at me lor...
ham Guess who spent all last night phasing in and out of the fourth dimension
ham So now my dad is gonna call after he gets out of work and ask all these crazy questions.
ham Yes..but they said its IT.,
ham Very hurting n meaningful lines ever: "I compromised everything for my love, But at d end my love compromised me for everything:-(".. Gud mornin:-)
ham Lmao!nice 1
ham Glad to see your reply.
spam URGENT! We are trying to contact U. Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050001295 from land line. Claim A21. Valid 12hrs only
spam Monthly password for wap. mobsi.com is 391784. Use your wap phone not PC.
ham Nah dub but je still buff
ham Painful words- "I thought being Happy was the most toughest thing on Earth... But, the toughest is acting Happy with all unspoken pain inside.."
ham Yeah, that's fine! It's £6 to get in, is that ok?
ham Lol where do u come up with these ideas?
ham So many people seems to be special at first sight, But only very few will remain special to you till your last sight.. Maintain them till life ends.. Sh!jas
ham Today is "song dedicated day.." Which song will u dedicate for me? Send this to all ur valuable frnds but first rply me...
ham Okay... We wait ah
ham Y lei?
ham HI BABE U R MOST LIKELY TO BE IN BED BUT IM SO SORRY ABOUT TONIGHT! I REALLY WANNA SEE U TOMORROW SO CALL ME AT 9. LOVE ME XXX
ham Already am squatting is the new way of walking
ham Do you want bold 2 or bb torch
ham Cramps stopped. Going back to sleep
spam todays vodafone numbers ending with 0089(my last four digits) are selected to received a £350 award. If your number matches please call 09063442151 to claim your £350 award
spam Free Top ringtone -sub to weekly ringtone-get 1st week free-send SUBPOLY to 81618-?3 per week-stop sms-08718727870
ham Nan sonathaya soladha. Why boss?
ham Bring tat cd don forget
spam Sunshine Quiz Wkly Q! Win a top Sony DVD player if u know which country the Algarve is in? Txt ansr to 82277. £1.50 SP:Tyrone
ham I don't know but I'm raping dudes at poker
ham Weightloss! No more girl friends. Make loads of money on ebay or something. And give thanks to God.
ham Was gr8 to see that message. So when r u leaving? Congrats dear. What school and wat r ur plans.
ham Ü eatin later but i'm eatin wif my frens now lei... Ü going home first?
ham Finish already... Yar they keep saying i mushy... I so embarrassed ok...
ham Sorry man, my stash ran dry last night and I can't pick up more until sunday
ham Hai priya are you right. What doctor said pa. Where are you.
spam Free msg. Sorry, a service you ordered from 81303 could not be delivered as you do not have sufficient credit. Please top up to receive the service.
ham Ok...
ham Please ask mummy to call father
ham Can come my room but cannot come my house cos my house still messy... Haha...
ham I have lost 10 kilos as of today!
ham Just taste fish curry :-P
ham What can i do? Might accidant tookplace between somewhere ghodbandar rd. Traffic moves slovely. So plz slip & don't worry.
ham Yun ah.now ü wkg where?btw if ü go nus sc. Ü wana specialise in wad?
ham Yes! I am a one woman man! Please tell me your likes and dislikes in bed...
ham Was doing my test earlier. I appreciate you. Will call you tomorrow.
ham How's my loverboy doing ? What does he do that keeps him from coming to his Queen, hmmm ? Doesn't he ache to speak to me ? Miss me desparately ?
ham U meet other fren dun wan meet me ah... Muz b a guy rite...
ham (No promises on when though, haven't even gotten dinner yet)
ham I got your back! Do you have any dislikes in bed?
ham o turns out i had stereo love on mi phone under the unknown album.
spam Hard LIVE 121 chat just 60p/min. Choose your girl and connect LIVE. Call 09094646899 now! Cheap Chat UK's biggest live service. VU BCM1896WC1N3XX
ham Yeah I don't see why not
ham Asking do u knw them or nt? May be ur frnds or classmates?
ham Sorry about earlier. Putting out fires.Are you around to talk after 9? Or do you actually have a life, lol!
spam WOW! The Boys R Back. TAKE THAT 2007 UK Tour. Win VIP Tickets & pre-book with VIP Club. Txt CLUB to 81303. Trackmarque Ltd info@vipclub4u.
ham As in missionary hook up, doggy hook up, standing...|
ham Then u better go sleep.. Dun disturb u liao.. U wake up then msg me lor..
ham Fighting with the world is easy, u either win or lose bt fightng with some1 who is close to u is dificult if u lose - u lose if u win - u still lose.
ham Am watching house – very entertaining – am getting the whole hugh laurie thing – even with the stick – indeed especially with the stick.
ham Thought praps you meant another one. Goodo! I'll look tomorrow
ham Hi Jon, Pete here, Ive bin 2 Spain recently & hav sum dinero left, Bill said u or ur rents mayb interested in it, I hav 12,000pes, so around £48, tb, James.
ham There bold 2 <#> . Is that yours
ham You know there is. I shall speak to you in <#> minutes then
ham "ALRITE HUNNY!WOT U UP 2 2NITE? DIDNT END UP GOIN DOWN TOWN JUS DA PUB INSTEAD! JUS CHILLIN AT DA MO IN ME BEDROOM!LOVE JEN XXX."
ham I went to project centre
ham As per your request 'Maangalyam (Alaipayuthe)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune
ham Lol yeah at this point I guess not
ham Doing project w frens lor.
ham Lol. Well quality aint bad at all so i aint complaining
ham K, can that happen tonight?
spam Hi, this is Mandy Sullivan calling from HOTMIX FM...you are chosen to receive £5000.00 in our Easter Prize draw.....Please telephone 09041940223 to claim before 29/03/05 or your prize will be transferred to someone else....
ham I think we're going to finn's now, come
ham Why tired what special there you had
ham I will come tomorrow di
ham I cant pick the phone right now. Pls send a message
ham K go and sleep well. Take rest:-).
ham U guys never invite me anywhere :(
spam UR GOING 2 BAHAMAS! CallFREEFONE 08081560665 and speak to a live operator to claim either Bahamas cruise of£2000 CASH 18+only. To opt out txt X to 07786200117
ham I can do that! I want to please you both inside and outside the bedroom...
ham "EY! CALM DOWNON THEACUSATIONS.. ITXT U COS IWANA KNOW WOTU R DOIN AT THEW/END... HAVENTCN U IN AGES..RING ME IF UR UP4 NETHING SAT.LOVE J XXX."
ham I love to wine and dine my lady!
spam Someone has conacted our dating service and entered your phone because they fancy you!To find out who it is call from landline 09111030116. PoBox12n146tf15
ham Im cool ta luv but v.tired 2 cause i have been doin loads of planning all wk, we have got our social services inspection at the nursery! Take care & spk sn x.
ham I don know account details..i will ask my mom and send you.my mom is out of reach now.
ham I think u have the wrong number.
ham Feel Yourself That You Are Always Happy.. Slowly It Becomes Your Habit & Finally It Becomes Part Of Your Life.. Follow It.. Happy Morning & Have A Happy Day:)
ham DO NOT B LATE LOVE MUM
ham Got it..mail panren paru..
ham * Was thinking about chuckin ur red green n black trainners 2 save carryin them bac on train
ham Give one miss from that number please
ham Jus came back fr lunch wif my sis only. U leh?
ham How is your schedule next week? I am out of town this weekend.
ham Really good:)dhanush rocks once again:)
ham Lmao ok I wont be needing u to do my hair anymore.
ham Miss ya, need ya, want ya, love ya.
ham Sorry i'm not free...
ham Do u ever get a song stuck in your head for no reason and it won't go away til u listen to it like 5 times?
ham Nt yet chikku..simple habba..hw abt u?
ham Got ur mail Dileep.thank you so muchand look forward to lots of support...very less contacts here,remember one venugopal you mentioned.tomorrow if not late,i shall try to come up till there.goodnight dear.
ham Sometimes Heart Remembrs someone Very much... Forgets someone soon... Bcoz Heart will not like everyone. But liked ones will be Remembered Everytime... BSLVYL
ham Joy's father is John. Then John is the NAME of Joy's father. Mandan
spam Hi 07734396839 IBH Customer Loyalty Offer: The NEW NOKIA6600 Mobile from ONLY £10 at TXTAUCTION!Txt word:START to No:81151 & get Yours Now!4T&
ham Hi this is yijue... It's regarding the 3230 textbook it's intro to algorithms second edition... I'm selling it for $50...
spam SMS AUCTION You have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+
ham K, want us to come by now?
ham How. Its a little difficult but its a simple way to enter this place
ham Ha... Both of us doing e same thing. But i got tv 2 watch. U can thk of where 2 go tonight or u already haf smth in mind...
ham Dont show yourself. How far. Put new pictures up on facebook.
ham Watching tv now. I got new job :)
ham Good afternoon sexy buns! How goes the job search ? I wake and you are my first thought as always, my love. I wish your fine and happy and know I adore you!
ham I'm not coming over, do whatever you want
ham Its ok chikku, and its my 1 of favourite song..:-)
ham Did u see what I posted on your Facebook?
spam Call FREEPHONE 0800 542 0578 now!
spam Buy Space Invaders 4 a chance 2 win orig Arcade Game console. Press 0 for Games Arcade (std WAP charge) See o2.co.uk/games 4 Terms + settings. No purchase
ham 7 wonders in My WORLD 7th You 6th Ur style 5th Ur smile 4th Ur Personality 3rd Ur Nature 2nd Ur SMS and 1st "Ur Lovely Friendship"... good morning dear
spam Loan for any purpose £500 - £75,000. Homeowners + Tenants welcome. Have you been previously refused? We can still help. Call Free 0800 1956669 or text back 'help'
spam BIG BROTHER ALERT! The computer has selected u for 10k cash or #150 voucher. Call 09064018838. NTT PO Box CRO1327 18+ BT Landline Cost 150ppm mobiles vary
ham ;-( oh well, c u later
ham My uncles in Atlanta. Wish you guys a great semester.
ham No dear i do have free messages without any recharge. Hi hi hi
ham Dont search love, let love find U. Thats why its called falling in love, bcoz U dont force yourself, U just fall and U know there is smeone to hold U... BSLVYL
ham I dun believe u. I thk u told him.
ham Do you know why god created gap between your fingers..? So that, One who is made for you comes & fills those gaps by holding your hand with LOVE..!
ham Yes:)sura in sun tv.:)lol.
ham Arun can u transfr me d amt
ham Takin a shower now but yeah I'll leave when I'm done
ham Am not working but am up to eyes in philosophy so will text u later when a bit more free for chat...
ham U havent lost me ill always b here 4u.i didnt intend 2 hurt u but I never knew how u felt about me when Iwas+marine&thats what itried2tell urmom.i careabout u
spam WIN: We have a winner! Mr. T. Foley won an iPod! More exciting prizes soon, so keep an eye on ur mobile or visit www.win-82050.co.uk
ham You bad girl. I can still remember them
ham How much i gave to you. Morning.
ham I hope your alright babe? I worry that you might have felt a bit desparate when you learned the job was a fake ? I am here waiting when you come back, my love
ham Hey, can you tell me blake's address? Carlos wanted me to meet him there but I got lost and he's not answering his phone
ham Can i get your opinion on something first?
ham That one week leave i put know that time. Why.
ham If we hit it off, you can move in with me :)
ham excellent. I spent <#> years in the Air Force. Iraq and afghanistan. I am stable and honest. do you like traveling?
ham I wanna watch that movie
ham Ok lor thanx... Ü in school?
ham I'm in class. Did you get my text.
ham The bus leaves at <#>
ham God bless.get good sleep my dear...i will pray!
spam Todays Voda numbers ending 1225 are selected to receive a £50award. If you have a match please call 08712300220 quoting claim code 3100 standard rates app
ham Do have a nice day today. I love you so dearly.
ham Aiyo a bit pai seh ü noe... Scared he dun rem who i am then die... Hee... But he become better lookin oredi leh...
ham Aight, I'll ask a few of my roommates
ham Now, whats your house # again ? And do you have any beer there ?
ham Do ü all wan 2 meet up n combine all the parts? How's da rest of da project going?
ham "Getting tickets 4 walsall tue 6 th march. My mate is getting me them on sat. ill pay my treat. Want 2 go. Txt bak .Terry"
ham Yes we are chatting too.
ham HI ITS JESS I DONT KNOW IF YOU ARE AT WORK BUT CALL ME WHEN U CAN IM AT HOME ALL EVE. XXX
ham Sian... Aft meeting supervisor got work 2 do liao... U working now?
ham Are you going to write ccna exam this week??
ham Well i will watch shrek in 3D!!B)
ham Am i that much dirty fellow?
ham Dunno dat's wat he told me. Ok lor...
ham I'll probably be by tomorrow (or even later tonight if something's going on)
ham I couldn't say no as he is a dying man and I feel sad for him so I will go and I just wanted you to know I would probably be gone late into your night
ham If you're thinking of lifting me one then no.
ham Same as u... Dun wan... Y u dun like me already ah... Wat u doing now? Still eating?
ham Sent me ur email id soon
ham Wat makes some people dearer is not just de happiness dat u feel when u meet them but de pain u feel when u miss dem!!!
ham Dude. What's up. How Teresa. Hope you have been okay. When i didnt hear from these people, i called them and they had received the package since dec <#> . Just thot you'ld like to know. Do have a fantastic year and all the best with your reading. Plus if you can really really Bam first aid for Usmle, then your work is done.
ham Hey gorgeous man. My work mobile number is. Have a good one babe. Squishy Mwahs.
ham May i call You later Pls
spam Hottest pics straight to your phone!! See me getting Wet and Wanting, just for you xx Text PICS to 89555 now! txt costs 150p textoperator g696ga 18 XxX
ham That's the way you should stay oh.
ham Hello- thanx for taking that call. I got a job! Starts on monday!
ham What time is ur flight tmr?
ham When should I come over?
ham I have a rather prominent bite mark on my right cheek
ham * Will be september by then!
ham Are you wet right now?
ham And how's your husband.
spam Hack Chat. Get backdoor entry into 121 chat rooms at a fraction of the cost. Reply NEO69 or call 09050280520, to subscribe 25p pm. DPS, Bcm box 8027 Ldn, wc1n3xx
ham Are we doing the norm tomorrow? I finish just a 4.15 cos of st tests. Need to sort library stuff out at some point tomo - got letter from today - access til end march so i better get move on!
ham Yeah. I got a list with only u and Joanna if I'm feeling really anti social
ham I am in your office na.
ham "Are you comingdown later?"
ham Super da:)good replacement for murali
ham Da is good good player.why he is unsold.
ham Hi. || Do u want | to join me with sts later? || Meeting them at five. || Call u after class.
ham Its on in engalnd! But telly has decided it won't let me watch it and mia and elliot were kissing! Damn it!
spam FREE NOKIA Or Motorola with upto 12mths 1/2price linerental, 500 FREE x-net mins&100txt/mth FREE B'tooth*. Call Mobileupd8 on 08001950382 or call 2optout/D3WV
ham I dont want to hear philosophy. Just say what happen
ham You got job in wipro:)you will get every thing in life in 2 or 3 years.
ham Then cant get da laptop? My matric card wif ü lei...
ham Dunno da next show aft 6 is 850. Toa payoh got 650.
spam This is the 2nd time we have tried 2 contact u. U have won the 750 Pound prize. 2 claim is easy, call 08718726970 NOW! Only 10p per min. BT-national-rate
ham I just made some payments so dont have that much. Sorry. Would you want it fedex or the other way.
ham They did't play one day last year know even though they have very good team.. Like india.
ham K.:)you are the only girl waiting in reception ah?
ham Say this slowly.? GOD,I LOVE YOU & I NEED YOU,CLEAN MY HEART WITH YOUR BLOOD.Send this to Ten special people & u c miracle tomorrow, do it,pls,pls do it...
ham I hate when she does this. She turns what should be a fun shopping trip into an annoying day of how everything would look in her house.
ham Sir, i am waiting for your call.
ham What's up. Do you want me to come online?
ham It could work, we'll reach a consensus at the next meeting
ham Aiyah then i wait lor. Then u entertain me. Hee...
ham The last thing i ever wanted to do was hurt you. And i didn't think it would have. You'd laugh, be embarassed, delete the tag and keep going. But as far as i knew, it wasn't even up. The fact that you even felt like i would do it to hurt you shows you really don't know me at all. It was messy wednesday, but it wasn't bad. The problem i have with it is you HAVE the time to clean it, but you choose not to. You skype, you take pictures, you sleep, you want to go out. I don't mind a few things here and there, but when you don't make the bed, when you throw laundry on top of it, when i can't have a friend in the house because i'm embarassed that there's underwear and bras strewn on the bed, pillows on the floor, that's something else. You used to be good about at least making the bed.
ham I'll let you know when it kicks in
ham You call him now ok i said call him
ham Call to the number which is available in appointment. And ask to connect the call to waheed fathima.
ham Or ü go buy wif him then i meet ü later can?
ham Mmmm ... Fuck ... Not fair ! You know my weaknesses ! *grins* *pushes you to your knee's* *exposes my belly and pulls your head to it* Don't forget ... I know yours too *wicked smile*
ham Today my system sh get ready.all is well and i am also in the deep well
ham Mom wants to know where you at
ham Aight, I'll text you when I'm back
ham Dont know supports ass and srt i thnk. I think ps3 can play through usb too
ham Oh ok i didnt know what you meant. Yep i am baby jontin
spam You have WON a guaranteed £1000 cash or a £2000 prize.To claim yr prize call our customer service representative on
spam Would you like to see my XXX pics they are so hot they were nearly banned in the uk!
spam HMV BONUS SPECIAL 500 pounds of genuine HMV vouchers to be won. Just answer 4 easy questions. Play Now! Send HMV to 86688 More info:www.100percent-real.com
ham Watching tv now. I got new job :)
ham This pen thing is beyond a joke. Wont a Biro do? Don't do a masters as can't do this ever again!
ham I AM AT A PARTY WITH ALEX NICHOLS
spam U have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094594
ham Just seeing your missed call my dear brother. Do have a gr8 day.
ham Ok.. Ü finishing soon?
ham Sorry, I can't help you on this.
ham Come to me, slave. Your doing it again ... Going into your shell and unconsciously avoiding me ... You are making me unhappy :-(
ham I love your ass! Do you enjoy doggy style? :)
ham I think asking for a gym is the excuse for lazy people. I jog.
spam Dear 0776xxxxxxx U've been invited to XCHAT. This is our final attempt to contact u! Txt CHAT to 86688 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18yrs
spam Urgent! Please call 09061743811 from landline. Your ABTA complimentary 4* Tenerife Holiday or £5000 cash await collection SAE T&Cs Box 326 CW25WX 150ppm
ham No. On the way home. So if not for the long dry spell the season would have been over
ham I gotta collect da car at 6 lei.
ham Ok but knackered. Just came home and went to sleep! Not good at this full time work lark.
ham Probably earlier than that if the station's where I think it is
spam CALL 09090900040 & LISTEN TO EXTREME DIRTY LIVE CHAT GOING ON IN THE OFFICE RIGHT NOW TOTAL PRIVACY NO ONE KNOWS YOUR [sic] LISTENING 60P MIN 24/7MP 0870753331018+
ham Good Morning plz call me sir
spam FreeMsg Hey U, i just got 1 of these video/pic fones, reply WILD to this txt & ill send U my pics, hurry up Im so bored at work xxx (18 150p/rcvd STOP2stop)
ham Uh, heads up we don't have THAT much left
ham I tot u outside cos darren say u come shopping. Of course we nice wat. We jus went sim lim look at mp3 player.
ham Aight, sounds good. When do you want me to come down?
ham Wat would u like 4 ur birthday?
ham I love working from home :)
ham And miss vday the parachute and double coins??? U must not know me very well...
ham Sorry, I'll call later
ham My sister got placed in birla soft da:-)
spam Free entry in 2 a weekly comp for a chance to win an ipod. Txt POD to 80182 to get entry (std txt rate) T&C's apply 08452810073 for details 18+
ham Wah... Okie okie... Muz make use of e unlimited... Haha...
ham There're some people by mu, I'm at the table by lambda
ham And stop being an old man. You get to build snowman snow angels and snowball fights.
ham ELLO BABE U OK?
ham Hello beautiful r u ok? I've kinda ad a row wiv and he walked out the pub?? I wanted a night wiv u Miss u
ham Then u going ikea str aft dat?
ham Becoz its <#> jan whn al the post ofice is in holiday so she cn go fr the post ofice...got it duffer
ham Lol grr my mom is taking forever with my prescription. Pharmacy is like 2 minutes away. Ugh.
ham For real tho this sucks. I can't even cook my whole electricity is out. And I'm hungry.
ham You want to go?
spam New TEXTBUDDY Chat 2 horny guys in ur area 4 just 25p Free 2 receive Search postcode or at gaytextbuddy.com. TXT ONE name to 89693. 08715500022 rpl Stop 2 cnl
ham Its not that time of the month nor mid of the time?
ham Fffff. Can you text kadeem or are you too far gone
ham We not leaving yet. Ok lor then we go elsewhere n eat. U thk...
ham Is fujitsu s series lifebook good?
ham Yar i wanted 2 scold u yest but late already... I where got zhong se qing you? If u ask me b4 he ask me then i'll go out w u all lor. N u still can act so real.
ham Dont know you bring some food
ham No current and food here. I am alone also
ham I'll be in sch fr 4-6... I dun haf da book in sch... It's at home...
ham Hello. They are going to the village pub at 8 so either come here or there accordingly. Ok?
ham Ok
ham We don call like <#> times oh. No give us hypertension oh.
ham Dont give a monkeys wot they think and i certainly don't mind. Any friend of mine&all that! Just don't sleep wiv , that wud be annoyin!
ham Omg it could snow here tonite!
spam Call from 08702490080 - tells u 2 call 09066358152 to claim £5000 prize. U have 2 enter all ur mobile & personal details @ the prompts. Careful!
spam Free 1st week entry 2 TEXTPOD 4 a chance 2 win 40GB iPod or £250 cash every wk. Txt VPOD to 81303 Ts&Cs www.textpod.net custcare 08712405020.
ham Carry on not disturbing both of you
ham What pa tell me.. I went to bath:-)
ham Jus finished avatar nigro
ham R u over scratching it?
ham Hope you are having a great day.
ham Did either of you have any idea's? Do you know of anyplaces doing something?
ham My planning usually stops at "find hella weed, smoke hella weed"
ham The fact that you're cleaning shows you know why i'm upset. Your priority is constantly "what i want to do," not "what i need to do."
ham Excellent! Are you ready to moan and scream in ecstasy?
spam More people are dogging in your area now. Call 09090204448 and join like minded guys. Why not arrange 1 yourself. There's 1 this evening. A£1.50 minAPN LS278BB
ham Dude avatar 3d was imp. At one point i thought there were actually flies in the room and almost tried hittng one as a reflex
spam WELL DONE! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TCs, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins
ham K...k:)why cant you come here and search job:)
ham I got lousy sleep. I kept waking up every 2 hours to see if my cat wanted to come in. I worry about him when its cold :(
ham Yeah, I'll leave in a couple minutes & let you know when I get to mu
ham Can ü call me at 10:10 to make sure dat i've woken up...
ham Hey we can go jazz power yoga hip hop kb and yogasana
ham The battery is for mr adewale my uncle. Aka Egbon
ham I cant pick the phone right now. Pls send a message
ham Wait 2 min..stand at bus stop
ham Oh ic. I thought you meant mary jane.
ham Haha... Really oh no... How? Then will they deduct your lesson tmr?
ham Nah im goin 2 the wrks with j wot bout u?
ham Then just eat a shit and wait for ur monkey face bitch.......... U asshole..................
ham Good night. Am going to sleep.
ham Aight I'll grab something to eat too, text me when you're back at mu
ham K...k:)why cant you come here and search job:)
ham Take something for pain. If it moves however to any side in the next 6hrs see a doctor.
ham Lol ... Oh no babe, I wont be sliding into your place after midnight, but thanks for the invite
ham Howz that persons story
spam Guess what! Somebody you know secretly fancies you! Wanna find out who it is? Give us a call on 09065394973 from Landline DATEBox1282EssexCM61XN 150p/min 18
ham LOL that would be awesome payback.
spam it to 80488. Your 500 free text messages are valid until 31 December 2005.
ham Yes :)it completely in out of form:)clark also utter waste.
ham Honeybee Said: *I'm d Sweetest in d World* God Laughed & Said: *Wait,U Havnt Met d Person Reading This Msg* MORAL: Even GOD Can Crack Jokes! GM+GN+GE+GN:)
ham Thanks. It was only from tescos but quite nice. All gone now. Speak soon
ham What's a feathery bowa? Is that something guys have that I don't know about?
ham Even i cant close my eyes you are in me our vava playing umma :-D
ham 2 laptop... I noe infra but too slow lar... I wan fast one
spam You have won a guaranteed £200 award or even £1000 cashto claim UR award call free on 08000407165 (18+) 2 stop getstop on 88222 PHP
ham Nvm it's ok...
ham Enjoy ur life. . Good night
ham Yes but can we meet in town cos will go to gep and then home. You could text at bus stop. And don't worry we'll have finished by march … ish!
ham I had askd u a question some hours before. Its answer
ham Thats cool. Where should i cum? On you or in you? :)
ham Delhi and chennai still silent.
ham Lol alright i was thinkin that too haha
spam Reply to win £100 weekly! Where will the 2006 FIFA World Cup be held? Send STOP to 87239 to end service
ham No I'm in the same boat. Still here at my moms. Check me out on yo. I'm half naked.
ham Shhhhh nobody is supposed to know!
ham Sorry, I'll call later
ham Sorry, I'll call later in meeting any thing related to trade please call Arul. <#>
ham Hey i will be late... i'm at amk. Need to drink tea or coffee
ham I wnt to buy a BMW car urgently..its vry urgent.but hv a shortage of <#> Lacs.there is no source to arng dis amt. <#> lacs..thats my prob
spam Urgent! Please call 09061743810 from landline. Your ABTA complimentary 4* Tenerife Holiday or #5000 cash await collection SAE T&Cs Box 326 CW25WX 150 ppm
ham The length is e same but e top shorter n i got a fringe now. I thk i'm not going liao. Too lazy. Dun wan 2 distract u also.
ham S..antha num corrct dane
ham No calls..messages..missed calls
ham Sorry, I'll call later
ham The basket's gettin full so I might be by tonight
ham HI DARLIN IVE JUST GOT BACK AND I HAD A REALLY NICE NIGHT AND THANKS SO MUCH FOR THE LIFT SEE U TOMORROW XXX
ham No other Valentines huh? The proof is on your fb page. Ugh I'm so glad I really DIDN'T watch your rupaul show you TOOL!
spam Free tones Hope you enjoyed your new content. text stop to 61610 to unsubscribe. help:08712400602450p Provided by tones2you.co.uk
ham Eh den sat u book e kb liao huh...
ham Have you been practising your curtsey?
ham Shall i come to get pickle
ham Lol boo I was hoping for a laugh
ham "YEH I AM DEF UP4 SOMETHING SAT,JUST GOT PAYED2DAY & I HAVBEEN GIVEN A£50 PAY RISE 4MY WORK & HAVEBEEN MADE PRESCHOOLCO-ORDINATOR 2I AM FEELINGOOD LUV"
ham Well, I have to leave for my class babe ... You never came back to me ... :-( ... Hope you have a nice sleep, my love
ham LMAO where's your fish memory when I need it?
ham But i'll b going 2 sch on mon. My sis need 2 take smth.
ham Idea will soon get converted to live:)
spam TheMob>Yo yo yo-Here comes a new selection of hot downloads for our members to get for FREE! Just click & open the next link sent to ur fone...
ham S....s...india going to draw the series after many years in south african soil..
ham Goodmorning, today i am late for <DECIMAL> min.
ham Can't take any major roles in community outreach. You rock mel
ham Shopping lor. Them raining mah hard 2 leave orchard.
ham Hi here. have birth at on the to at 8lb 7oz. Mother and baby doing brilliantly.
ham See the forwarding message for proof
ham I can't keep going through this. It was never my intention to run you out, but if you choose to do that rather than keep the room clean so *I* don't have to say no to visitors, then maybe that's the best choice. Yes, I wanted you to be embarassed, so maybe you'd feel for once how I feel when i have a friend who wants to drop buy and i have to say no, as happened this morning. I've tried everything. I don't know what else to do.
ham Dunno lei... I thk mum lazy to go out... I neva ask her yet...
ham Do whatever you want. You know what the rules are. We had a talk earlier this week about what had to start happening, you showing responsibility. Yet, every week it's can i bend the rule this way? What about that way? Do whatever. I'm tired of having thia same argument with you every week. And a <#> movie DOESNT inlude the previews. You're still getting in after 1.
ham Beautiful Truth against Gravity.. Read carefully: "Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it.." GOODMORNING
spam Great News! Call FREEFONE 08006344447 to claim your guaranteed £1000 CASH or £2000 gift. Speak to a live operator NOW!
ham Ambrith..madurai..met u in arun dha marrge..remembr?
ham Just re read it and I have no shame but tell me how he takes it and if he runs I will blame u 4 ever!! Not really 4 ever just a long time
ham Princess, is your kitty shaved or natural?
ham Better than bb. If he wont use it, his wife will or them doctor
ham Ya it came a while ago
ham From tomorrow onwards eve 6 to 3 work.
ham Anything lor but toa payoh got place 2 walk meh...
ham I don't have anybody's number, I still haven't thought up a tactful way to ask alex
spam U can WIN £100 of Music Gift Vouchers every week starting NOW Txt the word DRAW to 87066 TsCs www.ldew.com SkillGame,1Winaweek, age16.150ppermessSubscription
ham Is there any movie theatre i can go to and watch unlimited movies and just pay once?
ham U having lunch alone? I now so bored...
ham Yes obviously, but you are the eggs-pert and the potato head… Speak soon!
ham Nah man, my car is meant to be crammed full of people
ham No got new job at bar in airport on satsgettin 4.47per hour but means no lie in! keep in touch
ham Kallis is ready for bat in 2nd innings
ham Thanx but my birthday is over already.
ham Ugh y can't u just apologize, admit u were wrong and ask me to take u back?
ham I noe la... U wana pei bf oso rite... K lor, other days den...
ham Yes, i'm small kid.. And boost is the secret of my energy..
ham IM GONNA MISS U SO MUCH
ham Is avatar supposed to have subtoitles
ham Simply sitting and watching match in office..
ham You can jot down things you want to remember later.
ham Oh sorry please its over
ham Hey are we going for the lo lesson or gym?
ham Dont pack what you can buy at any store.like cereals. If you must pack food, pack gari or something 9ja that you will miss.
ham You always make things bigger than they are
ham Ü dun wan to watch infernal affair?
ham Me not waking up until 4 in the afternoon, sup
spam 4mths half price Orange line rental & latest camera phones 4 FREE. Had your phone 11mths ? Call MobilesDirect free on 08000938767 to update now! or2stoptxt
ham I can send you a pic if you like :)
ham Okay... I booked all already... Including the one at bugis.
ham Aight fuck it, I'll get it later
ham No de. But call me after some time. Ill tell you k
ham So dont use hook up any how
ham How much is blackberry bold2 in nigeria.
ham Hi where you. You in home or calicut?
ham Hey darlin.. i can pick u up at college if u tell me wen & where 2 mt.. love Pete xx
spam Call 09094100151 to use ur mins! Calls cast 10p/min (mob vary). Service provided by AOM, just GBP5/month. AOM Box61,M60 1ER until u stop. Ages 18+ only!
ham Oh... I was thkin of goin yogasana at 10 den no nd to go at 3 den can rush to parco 4 nb... Okie lor, u call me when ready...
ham Y so late but i need to go n get da laptop...
ham Sir, I am waiting for your mail.
ham .Please charge my mobile when you get up in morning.
ham Nothing, i got msg frm tht unknown no..
ham Ugh fuck it I'm resubbing to eve
ham He didn't see his shadow. We get an early spring yay
ham I did. One slice and one breadstick. Lol
ham Hey ! I want you ! I crave you ! I miss you ! I need you ! I love you, Ahmad Saeed al Hallaq ...
ham Is there any training tomorrow?
spam URGENT! Your mobile No *********** WON a £2,000 Bonus Caller Prize on 02/06/03! This is the 2nd attempt to reach YOU! Call 09066362220 ASAP! BOX97N7QP, 150ppm
ham Pass dis to all ur contacts n see wat u get! Red;i'm in luv wid u. Blue;u put a smile on my face. Purple;u r realy hot. Pink;u r so swt. Orange;i thnk i lyk u. Green;i realy wana go out wid u. Yelow;i wnt u bck. Black;i'm jealous of u. Brown;i miss you Nw plz giv me one color
ham Cos daddy arranging time c wat time fetch ü mah...
ham Then. You are eldest know.
ham Who's there say hi to our drugdealer
ham Its hard to believe things like this. All can say lie but think twice before saying anything to me.
spam Eerie Nokia tones 4u, rply TONE TITLE to 8007 eg TONE DRACULA to 8007 Titles: GHOST, ADDAMSFA, MUNSTERS, EXORCIST, TWILIGHT www.getzed.co.uk POBox36504W45WQ 150p
spam Sexy Singles are waiting for you! Text your AGE followed by your GENDER as wither M or F E.G.23F. For gay men text your AGE followed by a G. e.g.23G.
ham Good night my dear.. Sleepwell&Take care
ham That is wondarfull song
spam FreeMsg: Claim ur 250 SMS messages-Text OK to 84025 now!Use web2mobile 2 ur mates etc. Join Txt250.com for 1.50p/wk. T&C BOX139, LA32WU. 16 . Remove txtX or stop
ham Yar lor actually we quite fast... Cos da ge slow wat... Haha...
ham Must come later.. I normally bathe him in da afternoon mah..
ham Trust me. Even if isn't there, its there.
ham Hey hun-onbus goin 2 meet him. He wants 2go out 4a meal but I donyt feel like it cuz have 2 get last bus home!But hes sweet latelyxxx
spam 85233 FREE>Ringtone!Reply REAL
ham I can take you at like noon
ham Where is it. Is there any opening for mca.
ham I'm aight. Wat's happening on your side.
ham I'm done oredi...
ham you are sweet as well, princess. Please tell me your likes and dislikes in bed...
ham How are you. Wish you a great semester
ham Moji i love you more than words. Have a rich day
ham Dude how do you like the buff wind.
ham "alright babe, justthought id sayhey! how u doin?nearly the endof me wk offdam nevamind!We will have 2Hook up sn if uwant m8? loveJen x."
spam Well done ENGLAND! Get the official poly ringtone or colour flag on yer mobile! text TONE or FLAG to 84199 NOW! Opt-out txt ENG STOP. Box39822 W111WX £1.50
ham No i'm not. I can't give you everything you want and need. You actually could do better for yourself on yor own--you've got more money than i do. I can't get work, i can't get a man, i can't pay the rent, i can't even fill my fucking gas tank. yes, i'm stressed and depressed. I didn't even call home for thanksgiving cuz i'll have to tell them i,m up to nothing.
ham S:-)kallis wont play in first two odi:-)
ham Then get some cash together and I'll text jason
ham Oh, my love, it's soooo good to hear from you. Omg I missed you so much today. I'm sorry your having problems with the provider but thank you for tming me
spam Final Chance! Claim ur £150 worth of discount vouchers today! Text YES to 85023 now! SavaMob, member offers mobile! T Cs SavaMob POBOX84, M263UZ. £3.00 Subs 16
spam PRIVATE! Your 2004 Account Statement for 07742676969 shows 786 unredeemed Bonus Points. To claim call 08719180248 Identifier Code: 45239 Expires
ham Probably, want to pick up more?
ham I'm done...
ham Are you the cutest girl in the world or what
ham No dice, art class 6 thru 9 :( thanks though. Any idea what time I should come tomorrow?
spam SMS SERVICES. for your inclusive text credits, pls goto www.comuk.net login= ***** unsubscribe with STOP. no extra charge. help:08700469649. PO BOX420. IP4 5WE
ham Oh Howda gud gud.. Mathe en samachara chikku:-)
ham I thk 530 lor. But dunno can get tickets a not. Wat u doing now?
ham Audrie lousy autocorrect
ham Its a site to simulate the test. It just gives you very tough questions to test your readiness.
ham Anyway seriously hit me up when you're back because otherwise I have to light up with armand and he always has shit and/or is vomiting
ham I fetch yun or u fetch?
ham Thank you. I like you as well...
ham Hmmm ... And imagine after you've come home from that having to rub my feet, make me dinner and help me get ready for my date ! Are you sure your ready for that kind of life ?
spam FREE2DAY sexy St George's Day pic of Jordan!Txt PIC to 89080 dont miss out, then every wk a saucy celeb!4 more pics c PocketBabe.co.uk 0870241182716 £3/wk
ham Lara said she can loan me <#> .
ham Do we have any spare power supplies
ham Yar he quite clever but aft many guesses lor. He got ask me 2 bring but i thk darren not so willing 2 go. Aiya they thk leona still not attach wat.
spam You are a winner you have been specially selected to receive £1000 cash or a £2000 award. Speak to a live operator to claim call 087123002209am-7pm. Cost 10p
ham Yeah, don't go to bed, I'll be back before midnight
spam Sunshine Hols. To claim ur med holiday send a stamped self address envelope to Drinks on Us UK, PO Box 113, Bray, Wicklow, Eire. Quiz Starts Saturday! Unsub Stop
ham Well I wasn't available as I washob nobbing with last night so they had to ask Nickey Platt instead of me!;
ham It's that time of the week again, ryan
ham Wish u many many returns of the day.. Happy birthday vikky..
spam U can WIN £100 of Music Gift Vouchers every week starting NOW Txt the word DRAW to 87066 TsCs www.Idew.com SkillGame, 1Winaweek, age16. 150ppermessSubscription
ham I hope you know I'm still mad at you.
ham Argh my 3g is spotty, anyway the only thing I remember from the research we did was that province and sterling were the only problem-free places we looked at
ham In xam hall boy asked girl Tell me the starting term for dis answer I can den manage on my own After lot of hesitation n lookin around silently she said THE! intha ponnungale ipaditan;)
ham Do you know when the result.
spam +123 Congratulations - in this week's competition draw u have won the £1450 prize to claim just call 09050002311 b4280703. T&Cs/stop SMS 08718727868. Over 18 only 150ppm
ham Beautiful Truth against Gravity.. Read carefully: "Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it.." GOOD NIGHT
ham Sorry im getting up now, feel really bad- totally rejected that kinda me thing.
ham You do got a shitload of diamonds though
ham Tessy..pls do me a favor. Pls convey my birthday wishes to Nimya..pls dnt forget it. Today is her birthday Shijas
ham Well I'm going to be an aunty!
ham Mine here like all fr china then so noisy.
ham Later i guess. I needa do mcat study too.
ham S...from the training manual it show there is no tech process:)its all about password reset and troubleshooting:)
spam Your B4U voucher w/c 27/03 is MARSMS. Log onto www.B4Utele.com for discount credit. To opt out reply stop. Customer care call 08717168528
ham Spoke with uncle john today. He strongly feels that you need to sacrifice to keep me here. He's going to call you. When he does, i beg you to just listen. Dont make any promises or make it clear things are not easy. And i need you to please let us work things out. As long as i keep expecting help, my creativity will be stifled so pls just keep him happy, no promises on your part.
ham If he started searching he will get job in few days.he have great potential and talent.
ham Carlos took a while (again), we leave in a minute
ham Well done and ! luv ya all
ham Then why you came to hostel.
ham K still are you loving me.
ham But i juz remembered i gotta bathe my dog today..
ham After the drug she will be able to eat.
ham Alright took the morphine. Back in yo.
ham You see the requirements please
ham You stayin out of trouble stranger!!saw Dave the other day hes sorted now!still with me bloke when u gona get a girl MR!ur mum still Thinks we will get 2GETHA!
spam FreeMsg: Hey - I'm Buffy. 25 and love to satisfy men. Home alone feeling randy. Reply 2 C my PIX! QlynnBV Help08700621170150p a msg Send stop to stop txts
spam Sunshine Hols. To claim ur med holiday send a stamped self address envelope to Drinks on Us UK, PO Box 113, Bray, Wicklow, Eire. Quiz Starts Saturday! Unsub Stop
ham So can collect ur laptop?
ham Ok. Can be later showing around 8-8:30 if you want + cld have drink before. Wld prefer not to spend money on nosh if you don't mind, as doing that nxt wk.
ham I will once i get home
ham Waaaat?? Lololo ok next time then!
ham The table's occupied, I'm waiting by the tree
ham I surely dont forgot to come:)i will always be in touch in with you:-)
ham Hi kindly give us back our documents which we submitted for loan from STAPATI
ham I dont have i shall buy one dear
ham Oh god i am happy to see your message after 3 days
ham What year. And how many miles.
ham Hey cutie. How goes it? Here in WALES its kinda ok. There is like hills and shit but i still avent killed myself.
ham Sad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish. As I entered my cabin my PA said, '' Happy B'day Boss !!''. I felt special. She askd me 4 lunch. After lunch she invited me to her apartment. We went there. She said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife, My Parents, My Kidz, My Friends n My Colleagues. All screaming.. SURPRISE !! and I was waiting on the sofa.. ... ..... ' NAKED...!
ham I think you should go the honesty road. Call the bank tomorrow. Its the tough decisions that make us great people.
spam FREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 87077 Get txting and tell ur mates. zed POBox 36504 W45WQ norm150p/tone 16+
ham No. Its not specialisation. Can work but its slave labor. Will look for it this month sha cos no shakara 4 beggar.
ham Is she replying. Has boye changed his phone number
ham 1) Go to write msg 2) Put on Dictionary mode 3)Cover the screen with hand, 4)Press <#> . 5)Gently remove Ur hand.. Its interesting..:)
ham hi my darlin im on my way to London and we have just been smashed into by another driver! and have a big dent! im really missing u what have u been up to? xxx
ham Nothing really, just making sure everybody's up to speed
ham I'm not coming home 4 dinner.
ham Thank you. And by the way, I just lost.
ham Yes.he have good crickiting mind
ham Thx. All will be well in a few months
spam Shop till u Drop, IS IT YOU, either 10K, 5K, £500 Cash or £100 Travel voucher, Call now, 09064011000. NTT PO Box CR01327BT fixedline Cost 150ppm mobile vary
ham "CAN I PLEASE COME UP NOW IMIN TOWN.DONTMATTER IF URGOIN OUTL8R,JUST REALLYNEED 2DOCD.PLEASE DONTPLEASE DONTIGNORE MYCALLS,U NO THECD ISV.IMPORTANT TOME 4 2MORO"
ham I wont. So wat's wit the guys
ham Yavnt tried yet and never played original either
ham Hiya, had a good day? Have you spoken to since the weekend?
ham See? I thought it all through
ham I'm at work. Please call
ham get ready to moan and scream :)
ham Oh k :)why you got job then whats up?
ham I don,t think so. You don't need to be going out that late on a school night. ESPECIALLY when the one class you have is the one you missed last wednesday and probably failed a test in on friday
ham And popping <#> ibuprofens was no help.
ham Babe ! How goes that day ? What are you doing ? Where are you ? I sip my cappuccino and think of you, my love ... I send a kiss to you from across the sea
ham Ok.
ham PS U no ur a grown up now right?
ham Chinatown got porridge, claypot rice, yam cake, fishhead beehoon... Either we eat cheap den go cafe n tok or go nydc or somethin...
ham I know a few people I can hit up and fuck to the yes
ham Purity of friendship between two is not about smiling after reading the forwarded message..Its about smiling just by seeing the name. Gud evng
ham So is there anything specific I should be doing with regards to jaklin or what because idk what the fuck
ham Oh god. I'm gonna Google nearby cliffs now.
spam FREE camera phones with linerental from 4.49/month with 750 cross ntwk mins. 1/2 price txt bundle deals also avble. Call 08001950382 or call2optout/J MF
ham Yup i shd haf ard 10 pages if i add figures... Ü all got how many pages?
ham Ooh, 4got, i'm gonna start belly dancing in moseley weds 6.30 if u want 2 join me, they have a cafe too.
ham Thankyou so much for the call. I appreciate your care.
ham Congrats ! Treat pending.i am not on mail for 2 days.will mail once thru.Respect mother at home.check mails.
ham I called but no one pick up e phone. I ask both of them already they said ok.
ham Hi my email address has changed now it is
ham V-aluable. A-ffectionate. L-oveable. E-ternal. N-oble. T-ruthful. I-ntimate. N-atural. E-namous. Happy "VALENTINES DAY" in advance
ham Not much, just some textin'. How bout you?
ham Bring it if you got it
ham I'm in a movie. Call me 4 wat?
ham Not sure I have the stomach for it ...
ham Haha... can... But i'm having dinner with my cousin...
ham A boy was late 2 home. His father: "POWER OF FRNDSHIP"
ham (And my man carlos is definitely coming by mu tonight, no excuses)
ham soon you will have the real thing princess! Do i make you wet? :)
ham Raji..pls do me a favour. Pls convey my Birthday wishes to Nimya. Pls. Today is her birthday.
ham Haha, my legs and neck are killing me and my amigos are hoping to end the night with a burn, think I could swing by in like an hour?
spam URGENT! Your mobile No 07xxxxxxxxx won a £2,000 bonus caller prize on 02/06/03! this is the 2nd attempt to reach YOU! call 09066362231 ASAP! BOX97N7QP, 150PPM
ham Usually the body takes care of it buy making sure it doesnt progress. Can we pls continue this talk on saturday.
spam URGENT!! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TC s, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins
ham Hmm well, night night
ham Just wanted to say holy shit you guys weren't kidding about this bud
ham Just gettin a bit arty with my collages at the mo, well tryin 2 ne way! Got a roast in a min lovely i shall enjoy that!
ham This is one of the days you have a billion classes, right?
ham Goodmorning, today i am late for 2hrs. Because of back pain.
ham Ok then i'll let him noe later n ask him call u tmr...
ham Prabha..i'm soryda..realy..frm heart i'm sory
ham OK i'm waliking ard now... Do u wan me 2 buy anything go ur house?
ham * Will have two more cartons off u and is very pleased with shelves
ham Nice talking to you! please dont forget my pix :) i want to see all of you...
spam You have WON a guaranteed £1000 cash or a £2000 prize. To claim yr prize call our customer service representative on 08714712379 between 10am-7pm Cost 10p
ham But really quite funny lor wat... Then u shd haf run shorter distance wat...
ham I notice you like looking in the shit mirror youre turning into a right freak
ham Great. I was getting worried about you. Just know that a wonderful and caring person like you will have only the best in life. Know that u r wonderful and God's love is yours.
spam Thanks for your ringtone order, ref number K718. Your mobile will be charged £4.50. Should your tone not arrive please call customer services on 09065069120
ham I prefer my free days... Tues, wed, fri oso can... Ü ask those workin lor...
ham Alrite jod hows the revision goin? Keris bin doin a smidgin. N e way u wanna cum over after college?xx
ham If you have belive me. Come to my home.
ham Oh k.k..where did you take test?
ham Those were my exact intentions
ham haha but no money leh... Later got to go for tuition... Haha and looking for empty slots for driving lessons
ham Hey... Thk we juz go accordin to wat we discussed yest lor, except no kb on sun... Cos there's nt much lesson to go if we attend kb on sat...
ham K, wen ur free come to my home and also tel vikky i hav sent mail to him also.. Better come evening il be free today aftr 6pm..:-)
ham Nothing just getting msgs by dis name wit different no's..
ham Good Morning plz call me sir
ham What's your room number again? Wanna make sure I'm knocking on the right door
ham "Si.como no?!listened2the plaid album-quite gd&the new air1 which is hilarious-also boughtbraindancea comp.ofstuff on aphexs ;abel,u hav2hear it!c u sn xxxx"
ham Pls tell nelson that the bb's are no longer comin. The money i was expecting aint coming
ham Give her something to drink, if she takes it and doesn't vomit then you her temp might drop. If she unmits however let me know.
ham Think you sent the text to the home phone. That cant display texts. If you still want to send it his number is
ham Every day i use to sleep after <#> so only.
ham K I'll call you when I'm close
ham U buy newspapers already?
ham Nope wif my sis lor... Aft bathing my dog then i can bathe... Looks like it's going 2 rain soon.
ham Boo I'm on my way to my moms. She's making tortilla soup. Yummmm
ham No management puzzeles.
ham How did you find out in a way that didn't include all of these details
spam Hi ya babe x u 4goten bout me?' scammers getting smart..Though this is a regular vodafone no, if you respond you get further prem rate msg/subscription. Other nos used also. Beware!
spam Back 2 work 2morro half term over! Can U C me 2nite 4 some sexy passion B4 I have 2 go back? Chat NOW 09099726481 Luv DENA Calls £1/minMobsmoreLKPOBOX177HP51FL
ham will you like to be spoiled? :)
spam Thanks for your ringtone order, ref number R836. Your mobile will be charged £4.50. Should your tone not arrive please call customer services on 09065069154
ham I am getting threats from your sales executive Shifad as i raised complaint against him. Its an official message.
ham hope things went well at 'doctors' ;) reminds me i still need 2go.did u c d little thing i left in the lounge?
ham Den wat will e schedule b lk on sun?
ham Lol enjoy role playing much?
ham Ok. Me watching tv too.
ham I just lov this line: "Hurt me with the truth, I don't mind,i wil tolerat.bcs ur my someone..... But, Never comfort me with a lie" gud ni8 and sweet dreams
ham Just checked out, heading out to drop off my stuff now
ham Here got lots of hair dresser fr china.
ham Sad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish.
ham Ill call you evening ill some ideas.
spam SplashMobile: Choose from 1000s of gr8 tones each wk! This is a subscrition service with weekly tones costing 300p. U have one credit - kick back and ENJOY
ham Did you show him and wot did he say or could u not c him 4 dust?
ham It should take about <#> min
spam Not heard from U4 a while. Call 4 rude chat private line 01223585334 to cum. Wan 2C pics of me gettin shagged then text PIX to 8552. 2End send STOP 8552 SAM xxx
ham Ok . . now i am in bus. . If i come soon i will come otherwise tomorrow
ham I cant pick the phone right now. Pls send a message
spam <Forwarded from 88877>FREE entry into our £250 weekly comp just send the word ENTER to 88877 NOW. 18 T&C www.textcomp.com
ham Finish liao... U?
spam 88066 FROM 88066 LOST 3POUND HELP
ham Haha i think i did too
ham U know we watchin at lido?
ham Life spend with someone for a lifetime may be meaningless but a few moments spent with someone who really love you means more than life itself..
ham Haha awesome, I've been to 4u a couple times. Who all's coming?
ham Cold. Dont be sad dear
ham Think I could stop by in like an hour or so? My roommate's looking to stock up for a trip
ham Is that on the telly? No its Brdget Jones!
ham Love you aathi..love u lot..
ham Hello! How r u? Im bored. Inever thought id get bored with the tv but I am. Tell me something exciting has happened there? Anything! =/
ham Hmm...Bad news...Hype park plaza $700 studio taken...Only left 2 bedrm-$900...
ham Sorry, I'll call later in meeting
ham R ü comin back for dinner?
ham I hav almost reached. Call, i m unable to connect u.
ham Whom you waited for yesterday
ham I reach home safe n sound liao...
ham Velly good, yes please!
ham Hi, wkend ok but journey terrible. Wk not good as have huge back log of marking to do
ham I have had two more letters from . I will copy them for you cos one has a message for you. Speak soon
ham Alex knows a guy who sells mids but he's down in south tampa and I don't think I could set it up before like 8
ham Dont you have message offer
spam Had your mobile 11mths ? Update for FREE to Oranges latest colour camera mobiles & unlimited weekend calls. Call Mobile Upd8 on freefone 08000839402 or 2StopTx
ham HEY THERE BABE, HOW U DOIN? WOT U UP 2 2NITE LOVE ANNIE X.
ham Remind me how to get there and I shall do so
ham :-( that's not v romantic!
ham Hello. Damn this christmas thing. I think i have decided to keep this mp3 that doesnt work.
spam You have 1 new message. Please call 08718738034.
ham HI DARLIN IM MISSIN U HOPE YOU ARE HAVING A GOOD TIME. WHEN ARE U BACK AND WHAT TIME IF U CAN GIVE ME A CALL AT HOME. JESS XX
spam <Forwarded from 21870000>Hi - this is your Mailbox Messaging SMS alert. You have 4 messages. You have 21 matches. Please call back on 09056242159 to retrieve your messages and matches
ham Draw va?i dont think so:)
ham Dont pick up d call when something important is There to tell. Hrishi
spam Congrats! 1 year special cinema pass for 2 is yours. call 09061209465 now! C Suprman V, Matrix3, StarWars3, etc all 4 FREE! bx420-ip4-5we. 150pm. Dont miss out!
ham Nothin comes to my mind. Ü help me buy hanger lor. Ur laptop not heavy?
ham <#> , that's all? Guess that's easy enough
ham We can make a baby in yo tho
ham Should I tell my friend not to come round til like <#> ish?
ham Friendship poem: Dear O Dear U R Not Near But I Can Hear Dont Get Fear Live With Cheer No More Tear U R Always my Dear. Gud ni8
ham Still in the area of the restaurant. Ill try to come back soon
ham Aight that'll work, thanks
spam WIN a year supply of CDs 4 a store of ur choice worth £500 & enter our £100 Weekly draw txt MUSIC to 87066 Ts&Cs www.Ldew.com.subs16+1win150ppmx3
spam Moby Pub Quiz.Win a £100 High Street prize if u know who the new Duchess of Cornwall will be? Txt her first name to 82277.unsub STOP £1.50 008704050406 SP Arrow
ham I have 2 sleeping bags, 1 blanket and paper and phone details. Anything else?
spam You have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+
spam Congratulations! Thanks to a good friend U have WON the £2,000 Xmas prize. 2 claim is easy, just call 08718726971 NOW! Only 10p per minute. BT-national-rate.
spam [email protected] (More games from TheDailyDraw) Dear Helen, Dozens of Free Games - with great prizesWith..
ham So what do you guys do.
ham Also that chat was awesome but don't make it regular unless you can see her in person
ham That's significant but dont worry.
ham That's cause your old. I live to be high.
ham Waqt se pehle or naseeb se zyada kisi ko kuch nahi milta,Zindgi wo nahi he jo hum sochte hai Zindgi wo hai jo ham jeetey hai..........
ham On the way to office da..
ham In which place do you want da.
ham This pain couldn't have come at a worse time.
ham Ok...
ham Should I be stalking u?
ham Sorry dude. Dont know how i forgot. Even after Dan reminded me. Sorry. Hope you guys had fun.
ham Ok lor.
ham Apps class varaya elaya.
ham The Xmas story is peace.. The Xmas msg is love.. The Xmas miracle is jesus.. Hav a blessed month ahead & wish U Merry Xmas...
spam URGENT! Your mobile number *************** WON a £2000 Bonus Caller prize on 10/06/03! This is the 2nd attempt to reach you! Call 09066368753 ASAP! Box 97N7QP, 150ppm
ham That day you asked about anand number. Why:-)
ham Am surfing online store. For offers do you want to buy any thing.
ham Long beach lor. Expected... U having dinner now?
ham At home by the way
ham We are both fine. Thanks
ham What happen to her tell the truth
ham Do you like Italian food?
ham Which is weird because I know I had it at one point
ham "Aww you must be nearly dead!Well Jez isComing over toDo some workAnd that whillTake forever!"
ham Tell your friends what you plan to do on Valentines day @ <URL>
ham Alright, see you in a bit
ham Cheers for the message Zogtorius. Ive been staring at my phone for an age deciding whether to text or not.
ham I will take care of financial problem.i will help:)
ham Tell dear what happen to you. Why you talking to me like an alian
spam Double your mins & txts on Orange or 1/2 price linerental - Motorola and SonyEricsson with B/Tooth FREE-Nokia FREE Call MobileUpd8 on 08000839402 or2optout/HV9D
ham 1) Go to write msg 2) Put on Dictionary mode 3)Cover the screen with hand, 4)Press <#> . 5)Gently remove Ur hand.. Its interesting..:)
ham Okie...
ham Hi this is yijue, can i meet u at 11 tmr?
ham Its posible dnt live in <#> century cm frwd n thnk different
ham But i dint slept in afternoon.
ham That seems unnecessarily affectionate
ham Yar else i'll thk of all sorts of funny things.
ham You will be in the place of that man
spam Download as many ringtones as u like no restrictions, 1000s 2 choose. U can even send 2 yr buddys. Txt Sir to 80082 £3
ham Thats cool. How was your day?
spam Please CALL 08712402902 immediately as there is an urgent message waiting for you.
ham R we going with the <#> bus?
ham Hello, my love ! How went your day ? Are you alright ? I think of you, my sweet and send a jolt to your heart to remind you ... I LOVE YOU! Can you hear it ? I screamed it across the sea for all the world to hear. Ahmad al Hallaq is loved ! and owned ! *possessive passionate kiss*
ham No..he joined today itself.
ham Okay same with me. Well thanks for the clarification
ham I'll talk to the others and probably just come early tomorrow then
spam Spook up your mob with a Halloween collection of a logo & pic message plus a free eerie tone, txt CARD SPOOK to 8007 zed 08701417012150p per logo/pic
ham Had the money issue weigh me down but thanks to you, I can breathe easier now. I.ll make sure you dont regret it. Thanks.
ham Hi. I'm sorry i missed your call. Can you pls call back.
ham How are you doing? Hope you've settled in for the new school year. Just wishin you a gr8 day
spam Fantasy Football is back on your TV. Go to Sky Gamestar on Sky Active and play £250k Dream Team. Scoring starts on Saturday, so register now!SKY OPT OUT to 88088
ham Ok then no need to tell me anything i am going to sleep good night
ham Ok try to do week end course in coimbatore.
spam Tone Club: Your subs has now expired 2 re-sub reply MONOC 4 monos or POLYC 4 polys 1 weekly @ 150p per week Txt STOP 2 stop This msg free Stream 0871212025016
ham V nice! Off 2 sheffield tom 2 air my opinions on categories 2 b used 2 measure ethnicity in next census. Busy transcribing. :-)
ham If you r @ home then come down within 5 min
ham A Boy loved a gal. He propsd bt she didnt mind. He gv lv lttrs, Bt her frnds threw thm. Again d boy decided 2 aproach d gal , dt time a truck was speeding towards d gal. Wn it was about 2 hit d girl,d boy ran like hell n saved her. She asked 'hw cn u run so fast?' D boy replied "Boost is d secret of my energy" n instantly d girl shouted "our energy" n Thy lived happily 2gthr drinking boost evrydy Moral of d story:- I hv free msgs:D;): gud ni8
ham That day ü say ü cut ur hair at paragon, is it called hair sense? Do ü noe how much is a hair cut?
ham Hmm, too many of them unfortunately... Pics obviously arent hot cakes. Its kinda fun tho
ham Watching tv lor... Y she so funny we bluff her 4 wat. Izzit because she thk it's impossible between us?
spam XMAS Prize draws! We are trying to contact U. Todays draw shows that you have won a £2000 prize GUARANTEED. Call 09058094565 from land line. Valid 12hrs only
ham Dunno lei he neva say...
ham Thanx 4 2day! U r a goodmate I THINK UR RITE SARY! ASUSUAL!1 U CHEERED ME UP! LOVE U FRANYxxxxx
ham I'm on my way home. Went to change batt 4 my watch then go shop a bit lor.
spam YES! The only place in town to meet exciting adult singles is now in the UK. Txt CHAT to 86688 now! 150p/Msg.
ham Hi, Mobile no. <#> has added you in their contact list on www.fullonsms.com It s a great place to send free sms to people For more visit fullonsms.com
ham Good evening Sir, hope you are having a nice day. I wanted to bring it to your notice that I have been late in paying rent for the past few months and have had to pay a $ <#> charge. I felt it would be inconsiderate of me to nag about something you give at great cost to yourself and that's why i didnt speak up. I however am in a recession and wont be able to pay the charge this month hence my askin well ahead of month's end. Can you please help. Thank you for everything.
ham If i let you do this, i want you in the house by 8am.
ham Best line said in Love: . "I will wait till the day I can forget u Or The day u realize that u cannot forget me."... Gn
ham I will reach before ten morning
ham Your pussy is perfect!
ham Sorry, I'll call later
spam Someone has contacted our dating service and entered your phone becausethey fancy you! To find out who it is call from a landline 09058098002. PoBox1, W14RG 150p
ham No message..no responce..what happend?
ham Also where's the piece
ham wiskey Brandy Rum Gin Beer Vodka Scotch Shampain Wine "KUDI"yarasu dhina vaazhthukkal. ..
ham Boo. How's things? I'm back at home and a little bored already :-(
ham First has she gained more than <#> kg since she took in. Second has she done the blood sugar tests. If she has and its ok and her blood pressure is within normal limits then no worries
ham PICK UR FONE UP NOW U DUMB?
ham Thanks da thangam, i feel very very happy dear. I also miss you da.
ham Okey doke. I'm at home, but not dressed cos laying around ill! Speak to you later bout times and stuff.
ham I don't run away frm u... I walk slowly & it kills me that u don't care enough to stop me...
ham Babe, I'm back ... Come back to me ...
ham Well you told others you'd marry them...
ham Neshanth..tel me who r u?
ham YO YO YO BYATCH WHASSUP?
ham Oh... Kay... On sat right?
ham Hi! This is Roger from CL. How are you?
ham Ugh hopefully the asus ppl dont randomly do a reformat.
ham Haven't seen my facebook, huh? Lol!
ham Mah b, I'll pick it up tomorrow
ham Still otside le..u come 2morrow maga..
ham Do u still have plumbers tape and a wrench we could borrow?
spam Dear Voucher Holder, To claim this weeks offer, at you PC please go to http://www.e-tlp.co.uk/reward. Ts&Cs apply.
ham It vl bcum more difficult..
spam UR GOING 2 BAHAMAS! CallFREEFONE 08081560665 and speak to a live operator to claim either Bahamas cruise of£2000 CASH 18+only. To opt out txt X to 07786200117
ham Havent still waitin as usual... Ü come back sch oredi?
ham In meeting da. I will call you
ham K k :-):-) then watch some films.
ham Does cinema plus drink appeal tomo? * Is a fr thriller by director i like on at mac at 8.30.
ham There the size of elephant tablets & u shove um up ur ass!!
ham So many people seems to be special at first sight, But only very few will remain special to you till your last sight.. Maintain them till life ends.. take cr da
ham My Parents, My Kidz, My Friends n My Colleagues. All screaming.. SURPRISE !! and I was waiting on the sofa.. ... ..... ' NAKED...!
ham Dunno i juz askin cos i got a card got 20% off 4 a salon called hair sense so i tot it's da one ü cut ur hair.
ham Good morning pookie pie! Lol hope I didn't wake u up
ham MAYBE IF YOU WOKE UP BEFORE FUCKING 3 THIS WOULDN'T BE A PROBLEM.
ham Happy birthday to you....dear.with lots of love.rakhesh NRI
ham Howz that persons story
spam This is the 2nd time we have tried 2 contact u. U have won the 750 Pound prize. 2 claim is easy, call 08712101358 NOW! Only 10p per min. BT-national-rate
ham X2 <#> . Are you going to get that
ham Hi neva worry bout da truth coz the truth will lead me 2 ur heart. Its the least a unique person like u deserve. Sleep tight or morning
spam UR awarded a City Break and could WIN a £200 Summer Shopping spree every WK. Txt STORE to 88039.SkilGme.TsCs087147403231Winawk!Age16+£1.50perWKsub
ham Is ur paper today in e morn or aft?
ham I will lick up every drop :) are you ready to use your mouth as well?
ham And you! Will expect you whenever you text! Hope all goes well tomo
ham Great. P diddy is my neighbor and comes for toothpaste every morning
ham I av a new number, . Wil u only use this one,ta.
ham So its to be poking man everyday that they teach you in canada abi! How are you. Just saying hi.
ham 7 lor... Change 2 suntec... Wat time u coming?
ham No de.am seeing in online shop so that i asked.
ham Just curious because my cuz asked what I was up to
ham Nice.nice.how is it working?
ham Okay lor... Wah... like that def they wont let us go... Haha... What did they say in the terms and conditions?
ham Haha... Yup hopefully we will lose a few kg by mon. after hip hop can go orchard and weigh again
ham She's good. How are you. Where r u working now
ham Oh, yes, I've just been a little under the weather so i've kind of been coccooning at home
ham At home also.
ham This phone has the weirdest auto correct.
ham Oops my phone died and I didn't even know. Yeah I like it better.
ham Havent mus ask if u can 1st wat. Of meet 4 lunch den u n him meet can already lor. Or u wan 2 go ask da ge 1st then confirm w me asap?
ham She said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife,
ham OH YEAH,AND HAV A GREAT TIME IN NEWQUAY-SEND ME A POSTCARD !1 LOOK AFTER ALL THE GIRLS WHILE IM GONE(U KNOW THE 1IM TALKIN BOUT!)xx
ham We got a divorce. Lol. She.s here
ham What's ur pin?
ham Babe, have you got enough money to pick up bread and milk ? And I'll give you it back when you get home ?
ham I want snow. It's just freezing and windy.
spam URGENT! We are trying to contact U. Todays draw shows that you have won a £2000 prize GUARANTEED. Call 09066358361 from land line. Claim Y87. Valid 12hrs only
ham Come to mahal bus stop.. <DECIMAL>
ham Don know:)this week i'm going to tirunelvai da.
ham Me too baby! I promise to treat you well! I bet you will take good care of me...
ham Its like that hotel dusk game i think. You solve puzzles in a area thing
spam Thanks for your ringtone order, reference number X29. Your mobile will be charged 4.50. Should your tone not arrive please call customer services 09065989180
ham Hi, my love! How goes that day? Fuck, this morning I woke and dropped my cell on the way down the stairs but it seems alright ... *phews* I miss you !
ham Well that must be a pain to catch
ham Sorry da thangam.it's my mistake.
ham I need... Coz i never go before
ham Rose for red,red for blood,blood for heart,heart for u. But u for me.... Send tis to all ur friends.. Including me.. If u like me.. If u get back, 1-u r poor in relation! 2-u need some 1 to support 3-u r frnd 2 many 4-some1 luvs u 5+- some1 is praying god to marry u.:-) try it....
ham Wife.how she knew the time of murder exactly
spam SIX chances to win CASH! From 100 to 20,000 pounds txt> CSH11 and send to 87575. Cost 150p/day, 6days, 16+ TsandCs apply Reply HL 4 info
spam Ur cash-balance is currently 500 pounds - to maximize ur cash-in now send COLLECT to 83600 only 150p/msg. CC: 08718720201 PO BOX 114/14 TCR/W1
ham I feel like a dick because I keep sleeping through your texts and facebook messages. Sup, you in town?
ham No plm i will come da. On the way.
ham Guess he wants alone time. We could just show up and watch when they do..
ham Height of recycling: Read twice- People spend time for earning money and the same money is spent for spending time!;-) Good morning.. keep smiling:-)
ham Yup ü not comin :-(
ham Yes, princess. Toledo.
ham Aight text me when you're back at mu and I'll swing by, need somebody to get the door for me
ham Ron say fri leh. N he said ding tai feng cant make reservations. But he said wait lor.
ham Good. No swimsuit allowed :)
ham Am okay. Will soon be over. All the best
ham A cute thought for friendship: "Its not necessary to share every secret with ur close Frnd, but watever u shared should be true"....
ham Ok i've sent u da latest version of da project.
ham Good Morning my Dear........... Have a great & successful day.
ham Pls accept me for one day. Or am begging you change the number.
ham Squeeeeeze!! This is christmas hug.. If u lik my frndshp den hug me back.. If u get 3 u r cute:) 6 u r luvd:* 9 u r so lucky;) None? People hate u:
ham Its ok, if anybody asks abt me, u tel them..:-P
ham Funny fact Nobody teaches volcanoes 2 erupt, tsunamis 2 arise, hurricanes 2 sway aroundn no 1 teaches hw 2 choose a wife Natural disasters just happens
ham * You gonna ring this weekend or wot?
ham Also track down any lighters you can find
ham Sorry, I can't help you on this.
ham Babe, I need your advice
ham I‘ll leave around four, ok?
ham Come to medical college at 7pm ......forward it da
ham K:)k..its good:)when are you going?
ham I can make lasagna for you... vodka...
ham HI ITS KATE CAN U GIVE ME A RING ASAP XXX
ham Who were those people ? Were you in a tour ? I thought you were doing that sofa thing you sent me ? Your curious sugar
ham No, but you told me you were going, before you got drunk!
ham He fucking chickened out. He messaged me he would be late and woould buzz me and then I didn't hear a word from him
spam Congratulations! Thanks to a good friend U have WON the £2,000 Xmas prize. 2 claim is easy, just call 08718726978 NOW! Only 10p per minute. BT-national-rate
ham I'm always looking for an excuse to be in the city.
ham Yup i'm still having coffee wif my frens... My fren drove she'll give me a lift...
ham O shore are you takin the bus
ham So u gonna get deus ex?
ham I will send them to your email. Do you mind <#> times per night?
spam 44 7732584351, Do you want a New Nokia 3510i colour phone DeliveredTomorrow? With 300 free minutes to any mobile + 100 free texts + Free Camcorder reply or call 08000930705.
ham tap & spile at seven. * Is that pub on gas st off broad st by canal. Ok?
ham Ok then i come n pick u at engin?
ham Which is why i never wanted to tell you any of this. Which is why i'm so short with you and on-edge as of late.
ham Raviyog Peripherals bhayandar east
ham K actually can you guys meet me at the sunoco on howard? It should be right on the way
spam You have 1 new voicemail. Please call 08719181513.
ham MOON has come to color your dreams, STARS to make them musical and my SMS to give you warm and Peaceful Sleep. Good Night
ham Just finished eating. Got u a plate. NOT leftovers this time.
ham Thanx a lot...
ham Hurry home u big butt. Hang up on your last caller if u have to. Food is done and I'm starving. Don't ask what I cooked.
ham Lol your right. What diet? Everyday I cheat anyway. I'm meant to be a fatty :(
ham Its a great day. Do have yourself a beautiful one.
ham What happened in interview?
ham Solve d Case : A Man Was Found Murdered On <DECIMAL> . <#> AfterNoon. 1,His wife called Police. 2,Police questioned everyone. 3,Wife: Sir,I was sleeping, when the murder took place. 4.Cook: I was cooking. 5.Gardener: I was picking vegetables. 6.House-Maid: I went 2 d post office. 7.Children: We went 2 play. 8.Neighbour: We went 2 a marriage. Police arrested d murderer Immediately. Who's It? Reply With Reason, If U r Brilliant.
ham Badrith is only for chennai:)i will surely pick for us:)no competition for him.
ham I tot it's my group mate... Lucky i havent reply... Wat time do ü need to leave...
ham Hey you around? I've got enough for a half + the ten I owe you
ham Hey tmr maybe can meet you at yck
ham ALRITE SAM ITS NIC JUST CHECKIN THAT THIS IS UR NUMBER-SO IS IT?T.B*
ham They are just making it easy to pay back. I have <#> yrs to say but i can pay back earlier. You get?
ham Not to worry. I'm sure you'll get it.
ham The gas station is like a block away from my house, you'll drive right by it since armenia ends at swann and you have to take howard
spam Someone U know has asked our dating service 2 contact you! Cant Guess who? CALL 09058097189 NOW all will be revealed. POBox 6, LS15HB 150p
spam Camera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days
ham My tuition is at 330. Hm we go for the 1120 to 1205 one? Do you mind?
ham I'm not smoking while people use "wylie smokes too much" to justify ruining my shit
ham Dear good morning how you feeling dear
ham A little. Meds say take once every 8 hours. It's only been 5 but pain is back. So I took another. Hope I don't die
ham Beautiful tomorrow never comes.. When it comes, it's already TODAY.. In the hunt of beautiful tomorrow don't waste your wonderful TODAY.. GOODMORNING:)
ham Dunno lei ü all decide lor. How abt leona? Oops i tot ben is going n i msg him.
ham Hi there. We have now moved in2 our pub . Would be great 2 c u if u cud come up.
spam Todays Voda numbers ending 5226 are selected to receive a ?350 award. If you hava a match please call 08712300220 quoting claim code 1131 standard rates app
spam This message is free. Welcome to the new & improved Sex & Dogging club! To unsubscribe from this service reply STOP. msgs@150p 18 only
ham Honeybee Said: *I'm d Sweetest in d World* God Laughed & Said: *Wait,U Havnt Met d Person Reading This Msg* MORAL: Even GOD Can Crack Jokes! GM+GN+GE+GN:)
ham Just do what ever is easier for you
spam RCT' THNQ Adrian for U text. Rgds Vatian
ham Stop calling everyone saying I might have cancer. My throat hurts to talk. I can't be answering everyones calls. If I get one more call I'm not babysitting on Monday
ham It'll be tough, but I'll do what I have to
ham IM GONNAMISSU SO MUCH!!I WOULD SAY IL SEND U A POSTCARD BUTTHERES ABOUTAS MUCH CHANCE OF MEREMEMBERIN ASTHERE IS OFSI NOT BREAKIN HIS CONTRACT!! LUV Yaxx
ham Ee msg na poortiyagi odalebeku: Hanumanji 7 name 1-Hanuman 2-Bajarangabali 3-Maruti 4-Pavanaputra 5-Sankatmochan 6-Ramaduth 7-Mahaveer ee 7 name <#> janarige ivatte kalisidare next saturday olage ondu good news keluviri...! Maretare inde 1 dodda problum nalli siguviri idu matra <#> % true.. Don't neglet.
ham HI DARLIN I FINISH AT 3 DO U 1 2 PICK ME UP OR MEET ME? TEXT BACK ON THIS NUMBER LUV KATE XXX
ham Set a place for me in your heart and not in your mind, as the mind easily forgets but the heart will always remember. Wish you Happy Valentines Day!
ham But i'm surprised she still can guess right lor...
ham Okie ü wan meet at bishan? Cos me at bishan now. I'm not driving today.
ham Oh ho. Is this the first time u use these type of words
ham HI DARLIN HOW WAS WORK DID U GET INTO TROUBLE? IJUST TALKED TO YOUR MUM ALL MORNING! I HAD A REALLY GOOD TIME LAST NIGHT IM GOIN OUT SOON BUT CALL ME IF U CAN
ham I know you are serving. I mean what are you doing now.
ham Huh... Hyde park not in mel ah, opps, got confused... Anyway, if tt's e best choice den we juz have to take it...
ham Oh gei. That happend to me in tron. Maybe ill dl it in 3d when its out
spam FREE MESSAGE Activate your 500 FREE Text Messages by replying to this message with the word FREE For terms & conditions, visit www.07781482378.com
ham I know girls always safe and selfish know i got it pa. Thank you. good night.
ham No worries, hope photo shoot went well. have a spiffing fun at workage.
ham I'm freezing and craving ice. Fml
ham Kay... Since we are out already
ham Eh sorry leh... I din c ur msg. Not sad already lar. Me watching tv now. U still in office?
ham Yo im right by yo work
ham Ok darlin i supose it was ok i just worry too much.i have to do some film stuff my mate and then have to babysit again! But you can call me there.xx
ham She said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife,
ham I don wake since. I checked that stuff and saw that its true no available spaces. Pls call the embassy or send a mail to them.
ham Nope... Juz off from work...
ham Huh so fast... Dat means u havent finished painting?
ham what number do u live at? Is it 11?
ham No we put party 7 days a week and study lightly, I think we need to draw in some custom checkboxes so they know we're hardcore
ham Sac will score big hundred.he is set batsman:-)
ham Send me yetty's number pls.
ham How much it will cost approx . Per month.
ham Ok... The theory test? when are ü going to book? I think it's on 21 may. Coz thought wanna go out with jiayin. But she isnt free
spam You are being contacted by our dating service by someone you know! To find out who it is, call from a land line 09050000928. PoBox45W2TG150P
ham That's fine, have him give me a call if he knows what he wants or has any questions
ham Sorry, got a late start, we're on the way
ham Then u go back urself lor...
ham I AM AT THE GAS STATION. GO THERE.
ham K, if u bored up just come to my home..
ham Babe !!!! I LOVE YOU !!!! *covers your face in kisses*
ham Like I made him throw up when we were smoking in our friend's car one time, it was awesome
ham Still i have not checked it da. . .
ham You will go to walmart. I.ll stay.
ham I haven't forgotten you, i might have a couple bucks to send you tomorrow, k? I love ya too
ham Oh great. I.ll disturb him more so that we can talk.
ham Reverse is cheating. That is not mathematics.
ham U're welcome... Caught u using broken english again...
ham No problem baby. Is this is a good time to talk? I called and left a message.
ham Sorry, I'll call later
ham Oh is it! Which brand?
ham Sorry i cant take your call right now. It so happens that there r 2waxsto do wat you want. She can come and ill get her medical insurance. And she'll be able to deliver and have basic care. I'm currently shopping for the right medical insurance for her. So just give me til friday morning. Thats when i.ll see the major person that can guide me to the right insurance.
ham At what time are you coming.
ham Call him and say you not coming today ok and tell them not to fool me like this ok
ham I emailed yifeng my part oredi.. Can ü get it fr him..
ham R u sure they'll understand that! Wine * good idea just had a slurp!
ham Minimum walk is 3miles a day.
ham Ok not a problem will get them a taxi. C ing tomorrow and tuesday. On tuesday think we r all going to the cinema.
ham Brainless Baby Doll..:-D;-), vehicle sariyag drive madoke barolla..
ham I don't run away frm u... I walk slowly & it kills me that u don't care enough to stop me...
spam Sorry I missed your call let's talk when you have the time. I'm on 07090201529
ham Please attend the phone:)
ham You only hate me. You can call any but you didnt accept even a single call of mine. Or even you messaged
ham No messages on her phone. I'm holding it now
ham Can... I'm free...
ham Yo my trip got postponed, you still stocked up?
ham Sorry, I'll call later
ham I am waiting for your call sir.
ham Hey what are you doing. Y no reply pa..
ham Hey elaine, is today's meeting still on?
ham Sorry i've not gone to that place. I.ll do so tomorrow. Really sorry.
ham Most of the tiime when i don't let you hug me it's so i don't break into tears.
ham Tomorrow i am not going to theatre. . . So i can come wherever u call me. . . Tell me where and when to come tomorrow
ham And now electricity just went out fml.
ham Looks like you found something to do other than smoke, great job!
ham Also andros ice etc etc
ham :)
ham Good afternon, my love. How are today? I hope your good and maybe have some interviews. I wake and miss you babe. A passionate kiss from across the sea
ham Yup. Wun believe wat? U really neva c e msg i sent shuhui?
ham Hows that watch resizing
ham Dear umma she called me now :-)
ham Just finished. Missing you plenty
spam complimentary 4 STAR Ibiza Holiday or £10,000 cash needs your URGENT collection. 09066364349 NOW from Landline not to lose out! Box434SK38WP150PPM18+
ham Well, I meant as opposed to my drunken night of before
ham K... Must book a not huh? so going for yoga basic on sunday?
spam FREE MSG:We billed your mobile number by mistake from shortcode 83332.Please call 08081263000 to have charges refunded.This call will be free from a BT landline
ham Ok can...
ham Oops - am at my mum's in somerset... Bit far! Back tomo, see you soon x
ham So u workin overtime nigpun?
ham Same as kallis dismissial in 2nd test:-).
ham O. Guess they both got screwd
spam Please CALL 08712402972 immediately as there is an urgent message waiting for you
ham I'm in a meeting, call me later at
ham What r u cooking me for dinner?
ham Ok thanx...
ham Bull. Your plan was to go floating off to IKEA with me without a care in the world. So i have to live with your mess another day.
ham Then i buy.
spam URGENT! Your Mobile number has been awarded with a £2000 Bonus Caller Prize. Call 09058095201 from land line. Valid 12hrs only
ham Heehee that was so funny tho
ham It only does simple arithmetic not percentages.
ham Yeah we wouldn't leave for an hour at least, how's 4 sound?
spam As a valued customer, I am pleased to advise you that following recent review of your Mob No. you are awarded with a £1500 Bonus Prize, call 09066364589
ham Thanks honey. Have a great day.
ham 'An Amazing Quote'' - "Sometimes in life its difficult to decide whats wrong!! a lie that brings a smile or the truth that brings a tear...."
ham Good night my dear.. Sleepwell&Take care
ham Then ü ask dad to pick ü up lar... Ü wan 2 stay until 6 meh...
ham Jus chillaxin, what up
ham "HEY DAS COOL... IKNOW ALL 2 WELLDA PERIL OF STUDENTFINANCIAL CRISIS!SPK 2 U L8R."
ham Beautiful Truth against Gravity.. Read carefully: "Our heart feels light when someone is in it.. But it feels very heavy when someone leaves it.." GOODMORNING
spam Do you want a New Nokia 3510i colour phone DeliveredTomorrow? With 300 free minutes to any mobile + 100 free texts + Free Camcorder reply or call 08000930705
ham Whats that coming over the hill..... Is it a monster! Hope you have a great day. Things r going fine here, busy though!
ham Joy's father is John. Then John is the ____ of Joy's father. If u ans ths you hav <#> IQ. Tis s IAS question try to answer.
ham Only once then after ill obey all yours.
ham No she didnt. I will search online and let you know.
ham Where do you need to go to get it?
ham No pic. Please re-send.
ham He remains a bro amongst bros
ham Uhhhhrmm isnt having tb test bad when youre sick
ham But i haf enuff space got like 4 mb...
spam LIFE has never been this much fun and great until you came in. You made it truly special for me. I won't forget you! enjoy @ one gbp/sms
spam Do you want a new Video phone? 600 anytime any network mins 400 Inclusive Video calls AND downloads 5 per week Free delTOMORROW call 08002888812 or reply NOW
spam As a valued customer, I am pleased to advise you that following recent review of your Mob No. you are awarded with a £1500 Bonus Prize, call 09066368470
spam Welcome! Please reply with your AGE and GENDER to begin. e.g 24M
spam Freemsg: 1-month unlimited free calls! Activate SmartCall Txt: CALL to No: 68866. Subscriptn3gbp/wk unlimited calls Help: 08448714184 Stop?txt stop landlineonly
spam Had your mobile 10 mths? Update to latest Orange camera/video phones for FREE. Save £s with Free texts/weekend calls. Text YES for a callback orno to opt out
spam Am new 2 club & dont fink we met yet Will B gr8 2 C U Please leave msg 2day wiv ur area 09099726553 reply promised CARLIE x Calls£1/minMobsmore LKPOBOX177HP51FL
ham True. Its easier with her here.
ham Sure but since my parents will be working on Tuesday I don't really need a cover story
ham Haha okay... Today weekend leh...
ham "Hi darlin did youPhone me? Im atHome if youwanna chat."
ham I don't know jack shit about anything or i'd say/ask something helpful but if you want you can pretend that I did and just text me whatever in response to the hypotheticalhuagauahahuagahyuhagga
ham You've always been the brainy one.
ham Yeah if we do have to get a random dude we need to change our info sheets to PARTY <#> /7 NEVER STUDY just to be safe
spam Camera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days.
ham Christmas is An occasion that is Celebrated as a Reflection of UR... Values..., Desires..., Affections...& Traditions.... Have an ideal Christmas...
ham Sending you greetings of joy and happiness. Do have a gr8 evening
ham "Hi darlin i cantdo anythingtomorrow as myparents aretaking me outfor a meal. when are u free? Katexxx"
ham If india win or level series means this is record:)
ham Then what about further plan?
ham Its good to hear from you
ham awesome, how do I deal with the gate? Charles told me last night but, uh, yeah
ham What time you thinkin of goin?
spam Get a FREE mobile video player FREE movie. To collect text GO to 89105. Its free! Extra films can be ordered t's and c's apply. 18 yrs only
spam Save money on wedding lingerie at www.bridal.petticoatdreams.co.uk Choose from a superb selection with national delivery. Brought to you by WeddingFriend
ham Your board is working fine. The issue of overheating is also reslove. But still software inst is pending. I will come around 8'o clock.
ham Yes but I don't care cause I know its there!
ham wiskey Brandy Rum Gin Beer Vodka Scotch Shampain Wine "KUDI"yarasu dhina vaazhthukkal. ..
ham Mon okie lor... Haha, best is cheap n gd food la, ex oso okie... Depends on whether wana eat western or chinese food... Den which u prefer...
ham Sitting ard nothing to do lor. U leh busy w work?
ham Its <#> k here oh. Should i send home for sale.
ham Sorry. || mail? ||
ham Ya just telling abt tht incident..
ham Yes we were outside for like 2 hours. And I called my whole family to wake them up cause it started at 1 am
ham Ugh just got outta class
ham Nowadays people are notixiquating the laxinorficated opportunity for bambling of entropication.... Have you ever oblisingately opted ur books for the masteriastering amplikater of fidalfication? It is very champlaxigating, i think it is atrocious.. Wotz Ur Opinion???? Junna
ham I dont have any of your file in my bag..i was in work when you called me.i 'll tell you if i find anything in my room.
ham No need lar. Jus testing e phone card. Dunno network not gd i thk. Me waiting 4 my sis 2 finish bathing so i can bathe. Dun disturb u liao u cleaning ur room.
ham Ok. I.ll do you right later.
ham Friendship poem: Dear O Dear U R Not Near But I Can Hear Dont Get Fear Live With Cheer No More Tear U R Always my Dear. Gud ni8
ham Have your lunch and come quickly and open the door:)
spam Not heard from U4 a while. Call me now am here all night with just my knickers on. Make me beg for it like U did last time 01223585236 XX Luv Nikiyu4.net
ham I am back. Bit long cos of accident on a30. Had to divert via wadebridge.I had a brilliant weekend thanks. Speak soon. Lots of love
ham K.. I yan jiu liao... Sat we can go 4 bugis vill one frm 10 to 3 den hop to parco 4 nb. Sun can go cine frm 1030 to 2, den hop to orc mrt 4 hip hop at 4...
spam Bloomberg -Message center +447797706009 Why wait? Apply for your future http://careers. bloomberg.com
ham i am seeking a lady in the street and a freak in the sheets. Is that you?
ham My phone
ham Haha figures, well I found the piece and priscilla's bowl
ham Actually fuck that, just do whatever, do find an excuse to be in tampa at some point before january though
spam URGENT! We are trying to contact U. Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050001808 from land line. Claim M95. Valid12hrs only
ham yay! finally lol. i missed our cinema trip last week :-(
ham All day working day:)except saturday and sunday..
ham aathi..where are you dear..
ham Heart is empty without love.. Mind is empty without wisdom.. Eyes r empty without dreams & Life is empty without frnds.. So Alwys Be In Touch. Good night & sweet dreams
ham I think I‘m waiting for the same bus! Inform me when you get there, if you ever get there.
ham You getting back any time soon?
ham , how's things? Just a quick question.
ham Night has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng
ham I can probably come by, everybody's done around <#> right?
ham I got it before the new year cos yetunde said she wanted to surprise you with it but when i didnt see money i returned it mid january before the <#> day return period ended.
ham I can ask around but there's not a lot in terms of mids up here
ham Be sure to check your yahoo email. We sent photos yesterday
ham What was she looking for?
ham Wherre's my boytoy ? :-(
spam Do you want a NEW video phone750 anytime any network mins 150 text for only five pounds per week call 08000776320 now or reply for delivery tomorrow
ham Hello, my love! How goes that day ? I wish your well and fine babe and hope that you find some job prospects. I miss you, boytoy ... *a teasing kiss*
ham Tell my bad character which u Dnt lik in me. I'll try to change in <#> . I ll add tat 2 my new year resolution. Waiting for ur reply.Be frank...good morning.
ham No:-)i got rumour that you going to buy apartment in chennai:-)
ham Yeah, probably earlier than that
ham Change windows logoff sound..
ham Still i have not checked it da. . .
ham I'm also came to room.
ham Huh but i got lesson at 4 lei n i was thinkin of going to sch earlier n i tot of parkin at kent vale...
ham Ok.
ham I will reach office around <DECIMAL> . & my mobile have problem. You cann't get my voice. So call you asa i'll free
ham Cool, text me when you head out
spam You are being contacted by our dating service by someone you know! To find out who it is, call from a land line 09050000878. PoBox45W2TG150P
spam Wan2 win a Meet+Greet with Westlife 4 U or a m8? They are currently on what tour? 1)Unbreakable, 2)Untamed, 3)Unkempt. Text 1,2 or 3 to 83049. Cost 50p +std text
ham Happy birthday... May u find ur prince charming soon n dun work too hard...
ham Oh, the grand is having a bit of a party but it doesn't mention any cover charge so it's probably first come first served
ham You said to me before i went back to bed that you can't sleep for anything.
ham I hope you arnt pissed off but id would really like to see you tomorrow. Love me xxxxxxxxxxxxxX
spam [email protected] (Bank of Granite issues Strong-Buy) EXPLOSIVE PICK FOR OUR MEMBERS *****UP OVER 300% *********** Nasdaq Symbol CDGT That is a $5.00 per..
ham says the <#> year old with a man and money. I'm down to my last <#> . Still waiting for that check.
ham I will come to ur home now
ham Free any day but i finish at 6 on mon n thurs...
ham Will you be here for food
ham life alle mone,eppolum oru pole allalo
ham Nite...
ham Two fundamentals of cool life: "Walk, like you are the KING"...! OR "Walk like you Dont care,whoever is the KING"!... Gud nyt
ham Camera quite good, 10.1mega pixels, 3optical and 5digital dooms. Have a lovely holiday, be safe and i hope you hav a good journey! Happy new year to you both! See you in a couple of weeks!
ham Hi Petey!noim ok just wanted 2 chat coz avent spoken 2 u 4 a long time-hope ur doin alrite.have good nit at js love ya am.x
ham I just saw ron burgundy captaining a party boat so yeah
ham I'm serious. You are in the money base
ham Already one guy loving you:-.
ham Staff of placement training in Amrita college.
ham I always chat with you. In fact i need money can you raise me?
ham I'm job profile seems like bpo..
ham Well, I was about to give up cos they all said no they didn‘t do one nighters. I persevered and found one but it is very cheap so i apologise in advance. It is just somewhere to sleep isnt it?
ham So you think i should actually talk to him? Not call his boss in the morning? I went to this place last year and he told me where i could go and get my car fixed cheaper. He kept telling me today how much he hoped i would come back in, how he always regretted not getting my number, etc.
ham Are you willing to go for apps class.
ham Hanging out with my brother and his family
ham No it will reach by 9 only. She telling she will be there. I dont know
ham Hey... are you going to quit soon? Xuhui and i working till end of the month
ham Im sorry bout last nite it wasnt ur fault it was me, spouse it was pmt or sumthin! U 4give me? I think u shldxxxx
ham Try neva mate!!
ham Yeah that'd pretty much be the best case scenario
ham I not free today i haf 2 pick my parents up tonite...
ham "HEY BABE! FAR 2 SPUN-OUT 2 SPK AT DA MO... DEAD 2 DA WRLD. BEEN SLEEPING ON DA SOFA ALL DAY, HAD A COOL NYTHO, TX 4 FONIN HON, CALL 2MWEN IM BK FRMCLOUD 9! J X"
ham Should i send you naughty pix? :)
spam You are a £1000 winner or Guaranteed Caller Prize, this is our Final attempt to contact you! To Claim Call 09071517866 Now! 150ppmPOBox10183BhamB64XE
spam Xmas & New Years Eve tickets are now on sale from the club, during the day from 10am till 8pm, and on Thurs, Fri & Sat night this week. They're selling fast!
ham Tyler (getting an 8th) has to leave not long after 9, can you get here in like an hour?
ham Prepare to be pounded every night...
ham Actually, my mobile is full of msg. And i m doing a work online, where i need to send them <#> sent msg i wil explain u later.
ham Sorry, I'll call later
ham Good evening! How are you?
ham I'm at home. Please call
ham Oic cos me n my sis got no lunch today my dad went out... So dunno whether 2 eat in sch or wat...
ham Mmmmm ... It was sooooo good to wake to your words this morning, my Love!! Mmmm fuck ... I love you too, my Lion ... *devouring kiss from across the sea*
ham We are pleased to inform that your application for Airtel Broadband is processed successfully. Your installation will happen within 3 days.
ham What happen dear. Why you silent. I am tensed
ham I'll get there at 3, unless you guys want me to come some time sooner
ham If you are not coughing then its nothing
ham Ü come lt 25 n pass to me lar
ham I'm e person who's doing e sms survey...
ham Lol ok ill try to send. Be warned Sprint is dead slow. You'll prolly get it tomorrow
ham Thank You meet you monday
ham SO IS TH GOWER MATE WHICH IS WHERE I AM!?! HOW R U MAN? ALL IS GOOD IN WALES ILL B BACK MORROW. C U THIS WK? WHO WAS THE MSG 4? RANDOM!
spam Rock yr chik. Get 100's of filthy films &XXX pics on yr phone now. rply FILTH to 69669. Saristar Ltd, E14 9YT 08701752560. 450p per 5 days. Stop2 cancel
ham Single line with a big meaning::::: "Miss anything 4 ur "Best Life" but, don't miss ur best life for anything... Gud nyt...
ham I got like $ <#> , I can get some more later though. Get whatever you feel like
ham Dad wanted to talk about the apartment so I got a late start, omw now
ham I love you both too :-)
ham Lol u still feeling sick?
ham Din i tell u jus now 420
ham am up to my eyes in philosophy
spam From next month get upto 50% More Calls 4 Ur standard network charge 2 activate Call 9061100010 C Wire3.net 1st4Terms PoBox84 M26 3UZ Cost £1.50 min MobcudB more
ham Ok lor. I'm in town now lei.
ham I had it already..sabarish asked me to go..
ham No da. . Vijay going to talk in jaya tv
spam URGENT! We are trying to contact U Todays draw shows that you have won a £800 prize GUARANTEED. Call 09050000460 from land line. Claim J89. po box245c2150pm
ham Lol I know! Hey someone did a great inpersonation of flea on the forums. I love it!
spam Text BANNEDUK to 89555 to see! cost 150p textoperator g696ga 18+ XXX
ham Still chance there. If you search hard you will get it..let have a try :)
spam Auction round 4. The highest bid is now £54. Next maximum bid is £71. To bid, send BIDS e. g. 10 (to bid £10) to 83383. Good luck.
ham Do you always celebrate NY's with your family ?
ham We know TAJ MAHAL as symbol of love. But the other lesser known facts 1. Mumtaz was Shahjahan's 4th wife, out of his 7 wifes. 2. Shahjahan killed Mumtaz's husband to marry her. 3. Mumtaz died in her <#> th delivery. 4. He then married Mumtaz's sister. Question arises where the Hell is the LOVE?:-| -The Great Hari-
ham Its ok..come to my home it vl nice to meet and v can chat..
spam Collect your VALENTINE'S weekend to PARIS inc Flight & Hotel + £200 Prize guaranteed! Text: PARIS to No: 69101. www.rtf.sphosting.com
ham Sent me de webadres for geting salary slip
ham She's fine. Sends her greetings
spam Customer Loyalty Offer:The NEW Nokia6650 Mobile from ONLY £10 at TXTAUCTION! Txt word: START to No: 81151 & get yours Now! 4T&Ctxt TC 150p/MTmsg
ham But you dint in touch with me.
ham Yup, leaving right now, be back soon
spam You won't believe it but it's true. It's Incredible Txts! Reply G now to learn truly amazing things that will blow your mind. From O2FWD only 18p/txt
ham Yeah sure I'll leave in a min
ham And do you have any one that can teach me how to ship cars.
ham The sign of maturity is not when we start saying big things.. But actually it is, when we start understanding small things... *HAVE A NICE EVENING* BSLVYL
ham Yeah confirmed for you staying at that weekend
ham They said ü dun haf passport or smth like dat.. Or ü juz send to my email account..
ham Multiply the numbers independently and count decimal points then, for the division, push the decimal places like i showed you.
ham Have a lovely night and when you wake up to see this message, i hope you smile knowing all is as should be. Have a great morning
ham Ard 4 lor...
ham You are right. Meanwhile how's project twins comin up
ham I sent your maga that money yesterday oh.
spam Hi 07734396839 IBH Customer Loyalty Offer: The NEW NOKIA6600 Mobile from ONLY £10 at TXTAUCTION!Txt word:START to No:81151 & get Yours Now!4T&
ham Heart is empty without love.. Mind is empty without wisdom.. Eyes r empty without dreams & Life is empty without frnds.. So Alwys Be In Touch. Good night & sweet dreams
spam I am hot n horny and willing I live local to you - text a reply to hear strt back from me 150p per msg Netcollex LtdHelpDesk: 02085076972 reply Stop to end
ham Our ride equally uneventful - not too many of those pesky cyclists around at that time of night ;).
ham If you were/are free i can give. Otherwise nalla adi entey nattil kittum
ham I've sent my wife your text. After we buy them she'll tell you what to do. So just relax. We should go get them this wkend.
ham I am in escape theatre now. . Going to watch KAVALAN in a few minutes
ham How much would it cost to hire a hitman
ham I anything lor...
ham Sorry, I'll call later
spam Do you want a New Nokia 3510i Colour Phone Delivered Tomorrow? With 200 FREE minutes to any mobile + 100 FREE text + FREE camcorder Reply or Call 08000930705
ham Huh but i cant go 2 ur house empty handed right?
ham Good morning princess! Happy New Year!
spam Congratulations YOU'VE Won. You're a Winner in our August £1000 Prize Draw. Call 09066660100 NOW. Prize Code 2309.
ham Aight, we'll head out in a few
ham Then wat r u doing now? Busy wif work?
ham I know you mood off today
ham Jay told me already, will do
ham Cps is causing the outages to conserve energy.
ham I'm not sure, I was just checking out what was happening around the area
ham Hey morning what you come to ask:-) pa...
ham Jordan got voted out last nite!
ham That means you got an A in epi, she.s fine. She.s here now.
ham I have no idea where you are
ham Pls come quick cant bare this.
ham Joy's father is John. Then John is the ____ of Joy's father. If u ans ths you hav <#> IQ. Tis s IAS question try to answer.
ham Call me. I m unable to cal. Lets meet bhaskar, and deep
ham No. I.ll meet you in the library
ham K, my roommate also wants a dubsack and another friend may also want some so plan on bringing extra, I'll tell you when they know for sure
ham Depends on individual lor e hair dresser say pretty but my parents say look gong. U kaypoh.. I also dunno wat she collecting.
ham Ok c ü then.
ham I enjoy watching and playing football and basketball. Anything outdoors. And you?
ham Can you please ask macho what his price range is, does he want something new or used plus it he only interfued in the blackberry bold <#> or any bb
ham Sorry sent blank msg again. Yup but trying 2 do some serious studying now.
ham Hey check it da. I have listed da.
spam 8007 25p 4 Alfie Moon's Children in Need song on ur mob. Tell ur m8s. Txt TONE CHARITY to 8007 for nokias or POLY CHARITY for polys :zed 08701417012 profit 2 charity
ham I meant as an apology from me for texting you to get me drugs at <#> at night
ham That means from february to april i'll be getting a place to stay down there so i don't have to hustle back and forth during audition season as i have since my sister moved away from harlem.
ham Goin to workout lor... Muz lose e fats...
ham Damn, poor zac doesn't stand a chance
ham No message..no responce..what happend?
ham I want to tel u one thing u should not mistake me k THIS IS THE MESSAGE THAT YOU SENT:)
ham Yeah right! I'll bring my tape measure fri!
ham Still chance there. If you search hard you will get it..let have a try :)
ham Meeting u is my work. . . Tel me when shall i do my work tomorrow
ham Should I head straight there or what
spam Get the official ENGLAND poly ringtone or colour flag on yer mobile for tonights game! Text TONE or FLAG to 84199. Optout txt ENG STOP Box39822 W111WX £1.50
ham Thank you princess! You are so sexy...
ham Oooh I got plenty of those!
ham Hui xin is in da lib.
ham Its a big difference. <#> versus <#> every <#> hrs
ham It's not that you make me cry. It's just that when all our stuff happens on top of everything else, it pushes me over the edge. You don't underdtand how often i cry over my sorry, sorry life.
ham "ME 2 BABE I FEEL THE SAME LETS JUST 4GET ABOUT IT+BOTH TRY +CHEER UP+NOT FIT SOO MUCHXXLOVE U LOCAXX"
ham You know what hook up means right?
spam Customer service announcement. We recently tried to make a delivery to you but were unable to do so, please call 07090298926 to re-schedule. Ref:9307622
ham Wat's da model num of ur phone?
ham He's really into skateboarding now despite the fact that he gets thrown off of it and winds up with bandages and shit all over his arms every five minutes
spam You can stop further club tones by replying "STOP MIX" See my-tone.com/enjoy. html for terms. Club tones cost GBP4.50/week. MFL, PO Box 1146 MK45 2WT (2/3)
ham My house here e sky quite dark liao... If raining then got excuse not 2 run already rite... Hee...
ham Sorry, left phone upstairs. OK, might be hectic but would be all my birds with one fell swoop. It's a date.
ham * Thought I didn't see you.
spam wamma get laid?want real doggin locations sent direct to your mobile? join the UKs largest dogging network. txt dogs to 69696 now!nyt. ec2a. 3lp £1.50/msg.
ham Carlos says we can pick up from him later so yeah we're set
ham Hey babe, my friend had to cancel, still up for a visit ?
ham As per your request 'Maangalyam (Alaipayuthe)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune
ham Hmm ill have to think about it... ok you're forgiven! =D
ham We are hoping to get away by 7, from Langport. You still up for town tonight?
ham Want to send me a virtual hug?... I need one
ham Probably not, still going over some stuff here
ham It has issues right now. Ill fix for her by tomorrow.
ham Why i come in between you people
ham Senthil group company Apnt 5pm.
ham Oh really?? Did you make it on air? What's your talent?
ham Studying. But i.ll be free next weekend.
ham R u here yet? I'm wearing blue shirt n black pants.
ham Wait.i will come out.. <#> min:)
ham I will reach ur home in <#> minutes
ham Well then you have a great weekend!
ham What are you doing in langport? Sorry, but I'll probably be in bed by 9pm. It sucks being ill at xmas! When do you and go2sri lanka?
ham Frnd s not juz a word.....not merely a relationship.....its a silent promise which says ... " I will be with YOU " Wherevr.. Whenevr.. Forevr... Gudnyt dear..
ham Huh? 6 also cannot? Then only how many mistakes?
ham Ha... U jus ate honey ar? So sweet...
ham I'm turning off my phone. My moms telling everyone I have cancer. And my sister won't stop calling. It hurts to talk. Can't put up with it. See u when u get home. Love u
ham Honey ? Sweetheart ? Darling ? Sexy buns ? Sugar plum ? Loverboy ? I miss you, boytoy ... *smacks your ass* Did you go to the gym too ?
ham Thanks for loving me so. You rock
ham Yeah imma come over cause jay wants to do some drugs
ham Ok thanx... Take care then...
ham Yup. Thk of u oso boring wat.
ham came to look at the flat, seems ok, in his 50s? * Is away alot wiv work. Got woman coming at 6.30 too.
ham Moji just informed me that you saved our lives. Thanks.
spam You have won a Nokia 7250i. This is what you get when you win our FREE auction. To take part send Nokia to 86021 now. HG/Suite342/2Lands Row/W1JHL 16+
ham Whos this am in class:-)
ham Hey r ü still online? I've finished the formatting...
ham Great! So what attracts you to the brothas?
spam Promotion Number: 8714714 - UR awarded a City Break and could WIN a £200 Summer Shopping spree every WK. Txt STORE to 88039 . SkilGme. TsCs087147403231Winawk!Age16 £1.50perWKsub
ham Stupid.its not possible
ham I cant pick the phone right now. Pls send a message
ham LOL what happens in Vegas stays in vegas
ham Hello, hello, hi lou sorry it took so long 2 reply- I left mobile at friends in Lancaster, just got it bak Neway im sorry I couldnt make ur bday 2 hun!
ham When did i use soc... I use it only at home... Ü dunno how 2 type it in word ar...
ham Dad says hurry the hell up
ham Wake me up at <#> am morning:)
ham I get out of class in bsn in like <#> minutes, you know where advising is?
ham Great! I shoot big loads so get ready!
ham I'll meet you in the lobby
ham You still coming tonight?
ham What happen dear tell me
ham Sir, i am waiting for your call, once free please call me.
ham No i am not having not any movies in my laptop
ham I was about to do it when i texted. I finished a long time ago and showered and er'ything!
ham Ok im not sure what time i finish tomorrow but i wanna spend the evening with you cos that would be vewy vewy lubly! Love me xxx
ham Hello, As per request from <#> Rs.5 has been transfered to you
ham I am in tirupur. call you da.
spam You are a winner you have been specially selected to receive £1000 cash or a £2000 award. Speak to a live operator to claim call 087147123779am-7pm. Cost 10p
ham S:)but he had some luck.2 catches put down:)
ham How i noe... Did ü specify da domain as nusstu... Ü still in sch...
ham Oh...i asked for fun. Haha...take care. ü
ham Shall i get my pouch?
ham Hey loverboy! I love you !! I had to tell ... I look at your picture and ache to feel you between my legs ... Fuck I want you ... I need you ... I crave you .
ham How is my boy? No sweet words left for me this morning ... *sighs* ... How goes you day, my love ? Did you start your studying?
ham Kent vale lor... Ü wait 4 me there ar?
ham Ok. Very good. Its all about making that money.
ham Reading gud habit.. Nan bari hudgi yorge pataistha ertini kano:-)
ham Aight do you still want to get money
spam Free Top ringtone -sub to weekly ringtone-get 1st week free-send SUBPOLY to 81618-?3 per week-stop sms-08718727870
ham Ok.ok ok..then..whats ur todays plan
ham ARE YOU IN TOWN? THIS IS V. IMPORTANT
ham Sorry pa, i dont knw who ru pa?
ham Wat u doing there?
ham If i not meeting ü all rite then i'll go home lor. If ü dun feel like comin it's ok.
ham Oh, i will get paid. The most outstanding one is for a commercial i did for Hasbro...in AUGUST! They made us jump through so many hoops to get paid. Still not.
ham I am late,so call you tomorrow morning.take care sweet dreams....u and me...ummifying...bye.
ham Networking technical support associate.
ham I'm gonna rip out my uterus.
ham Cool. Do you like swimming? I have a pool and jacuzzi at my house.
spam Thanks for your ringtone order, reference number X49. Your mobile will be charged 4.50. Should your tone not arrive please call customer services 09065989182. From: [colour=red]text[/colour]TXTstar
ham Yeah why not, is the gang all ready
ham Blank is Blank. But wat is blank? Lol
ham I'm in a movie... Collect car oredi...
ham We left already we at orchard now.
spam Hi there, 2nights ur lucky night! Uve been invited 2 XCHAT, the Uks wildest chat! Txt CHAT to 86688 now! 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18yrs
ham Nothing spl..wat abt u and whr ru?
ham No chikku nt yet.. Ya i'm free
ham Aldrine, rakhesh ex RTM here.pls call.urgent.
ham The search 4 happiness is 1 of d main sources of unhappiness! Accept life the way it comes! U will find happiness in every moment u live.
ham I'm at home. Please call
ham I guess you could be as good an excuse as any, lol.
ham Isn't frnd a necesity in life? imagine urself witout a frnd.. hw'd u feel at ur colleg? wat'll u do wth ur cell? wat abt functions? thnk abt events espe'll cared, missed & irritated u? 4wrd it to all those dear-loving frnds wthout whom u cant live.. I jst did it.. Takecare..:) GOODMORNING
ham Gud mrng dear hav a nice day
ham Old Orchard near univ. How about you?
ham 4 tacos + 1 rajas burrito, right?
ham It‘s £6 to get in, is that ok?
ham Hows the street where the end of library walk is?
ham Plz note: if anyone calling from a mobile Co. & asks u to type # <#> or # <#> . Do not do so. Disconnect the call,coz it iz an attempt of 'terrorist' to make use of the sim card no. Itz confirmd by nokia n motorola n has been verified by CNN IBN.
ham We stopped to get ice cream and will go back after
ham Did you stitch his trouser
ham No da. . Vijay going to talk in jaya tv
spam 2/2 146tf150p
ham Hey i'm bored... So i'm thinking of u... So wat r u doing?
ham Nah, Wednesday. When should I bring the mini cheetos bag over?
ham Nobody names their penis a girls name this story doesn't add up at all
ham Aight, let me know when you're gonna be around usf
ham I'm not. She lip synced with shangela.
ham Ü neva tell me how i noe... I'm not at home in da aft wat...
ham A bit of Ur smile is my hppnss, a drop of Ur tear is my sorrow, a part of Ur heart is my life, a heart like mine wil care for U, forevr as my GOODFRIEND
spam Dear Voucher Holder 2 claim your 1st class airport lounge passes when using Your holiday voucher call 08704439680. When booking quote 1st class x 2
ham Buzz! Hey, my Love ! I think of you and hope your day goes well. Did you sleep in ? I miss you babe. I long for the moment we are together again*loving smile*
ham Haha... Sounds crazy, dunno can tahan anot...
ham Why are u up so early?
ham Ya that one is slow as poo
spam Bloomberg -Message center +447797706009 Why wait? Apply for your future http://careers. bloomberg.com
ham "Im on gloucesterroad what are uup to later?"
ham Yes:)here tv is always available in work place..
spam YES! The only place in town to meet exciting adult singles is now in the UK. Txt CHAT to 86688 now! 150p/Msg.
ham Lol no ouch but wish i'd stayed out a bit longer
ham GOD ASKED, "What is forgiveness?" A little child gave lovely reply, "It is d wonderful fruit that a tree gives when it is being hurt by a stone.. Good night......
ham We'll join the <#> bus
ham Was just about to ask. Will keep this one. Maybe that's why you didn't get all the messages we sent you on glo
spam FREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 8007 Get txting and tell ur mates www.getzed.co.uk POBox 36504 W45WQ norm150p/tone 16+
ham K.i will send in <#> min:)
ham Would me smoking you out help us work through this difficult time
spam Someone U know has asked our dating service 2 contact you! Cant guess who? CALL 09058095107 NOW all will be revealed. POBox 7, S3XY 150p
ham Yes.mum lookin strong:)
ham Sir Goodmorning, Once free call me.
ham Where are you call me.
ham Was gr8 to see that message. So when r u leaving? Congrats dear. What school and wat r ur plans.
ham Love it! The girls at the office may wonder why you are smiling but sore...
ham Hi, wlcome back, did wonder if you got eaten by a lion or something, nothing much
ham Does uncle timi help in clearing cars
ham I came hostel. I m going to sleep. Plz call me up before class. Hrishi.
ham Ok... But bag again..
ham Hi! You just spoke to MANEESHA V. We'd like to know if you were satisfied with the experience. Reply Toll Free with Yes or No.
ham Ok lor. Msg me b4 u call.
spam Mila, age23, blonde, new in UK. I look sex with UK guys. if u like fun with me. Text MTALK to 69866.18 . 30pp/txt 1st 5free. £1.50 increments. Help08718728876
ham Once a fishrman woke early in d mrng. It was very dark. He waited a while & found a sack ful of stones. He strtd throwin thm in2 d sea 2 pass time. Atlast he had jus 1stone, sun rose up & he found out tht those r nt stones, those were diamonds. Moral:"Dont wake up early in d mrng'' GOOD night
spam Claim a 200 shopping spree, just call 08717895698 now! Have you won! MobStoreQuiz10ppm
ham Then ur physics get a-?
ham Dear friends, sorry for the late information. Today is the birthday of our loving Ar.Praveesh. for more details log on to face book and see. Its his number + <#> . Dont miss a delicious treat.
ham How r ü going to send it to me?
ham Can you do online transaction?
ham Dear got train and seat mine lower seat
ham Let me know if you need anything else. Salad or desert or something... How many beers shall i get?
ham Wat r u doing?
ham WHORE YOU ARE UNBELIEVABLE.
spam Want to funk up ur fone with a weekly new tone reply TONES2U 2 this text. www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply
ham Are you sure you don't mean "get here, we made you hold all the weed"
ham I love you !!! You know? Can you feel it? Does it make your belly warm? I wish it does, my love ... I shall meet you in your dreams, Ahmad ... *adoring kiss*
spam Twinks, bears, scallies, skins and jocks are calling now. Don't miss the weekend's fun. Call 08712466669 at 10p/min. 2 stop texts call 08712460324(nat rate)
ham Love it! I want to flood that pretty pussy with cum...
ham Hey are you angry with me. Reply me dr.
ham Short But Cute: "Be a good person, but dont try to prove it.." .Gud noon....
ham Also remember the beads don't come off. Ever.
ham They have a thread on the wishlist section of the forums where ppl post nitro requests. Start from the last page and collect from the bottom up.
ham For The First Time In The History 'Need' 'Comfort' And 'Luxury' Are Sold At Same Price In India..!! Onion-Rs. <#> Petrol-Rs. <#> Beer-Rs. <#> SHESIL <#>
ham Feb <#> is "I LOVE U" day. Send dis to all ur "VALUED FRNDS" evn me. If 3 comes back u'll gt married d person u luv! If u ignore dis u will lose ur luv 4 Evr
ham Actually nvm, got hella cash, we still on for <#> ish?
spam We tried to contact you re your reply to our offer of a Video Handset? 750 anytime any networks mins? UNLIMITED TEXT? Camcorder? Reply or call 08000930705 NOW
ham It's ok, at least armand's still around
ham No da. I am happy that we sit together na
ham Yup song bro. No creative. Neva test quality. He said check review online.
ham No dude, its not fake..my frnds got money, thts y i'm reffering u..if u member wit my mail link, u vl be credited <#> rs and il be getiing <#> rs..i can draw my acc wen it is <#> rs..
ham Dude while were makin those weirdy brownies my sister made awesome cookies. I took pics.
spam URGENT! We are trying to contact you. Last weekends draw shows that you have won a £900 prize GUARANTEED. Call 09061701851. Claim code K61. Valid 12hours only
ham Pls dont restrict her from eating anythin she likes for the next two days.
ham Mm you ask him to come its enough :-)
ham At the funeral home with Audrey and dad
ham Aight, can you text me the address?
ham Excellent! Wish we were together right now!
ham Yep then is fine 7.30 or 8.30 for ice age.
ham Pls i wont belive god.not only jesus.
ham Can. Dunno wat to get 4 her...
ham Not yet chikku..k, then wat abt tht guy did he stopped irritating or msging to u..
ham How long does it take to get it.
ham This is my number by vivek..
spam 74355 XMAS iscoming & ur awarded either £500 CD gift vouchers & free entry 2 r £100 weekly draw txt MUSIC to 87066 TnC
ham sorry brah, just finished the last of my exams, what up
ham I got arrested for possession at, I shit you not, <TIME> pm
ham You are right though. I can't give you the space you want and need. This is really starting to become an issue. I was going to suggest setting a definite move out--if i'm still there-- after greece. But maybe you are ready and should do it now.
ham Just normal only here :)
ham Please protect yourself from e-threats. SIB never asks for sensitive information like Passwords,ATM/SMS PIN thru email. Never share your password with anybody.
ham I miss you so much I'm so desparate I have recorded the message you left for me the other day and listen to it just to hear the sound of your voice. I love you
ham Hi. I'm always online on yahoo and would like to chat with you someday
ham Goodmorning,my grandfather expired..so am on leave today.
spam Congratulations U can claim 2 VIP row A Tickets 2 C Blu in concert in November or Blu gift guaranteed Call 09061104276 to claim TS&Cs www.smsco.net cost£3.75max
ham Where are you ? What are you doing ? Are yuou working on getting the pc to your mom's ? Did you find a spot that it would work ? I need you
ham Sure, I'll see if I can come by in a bit
ham I agree. So i can stop thinkin about ipad. Can you please ask macho the same question.
ham Let's pool our money together and buy a bunch of lotto tickets. If we win I get <#> % u get <#> %. Deal?
ham Ok.
ham I had askd u a question some hours before. Its answer
ham Watching tv lor. Nice one then i like lor.
ham I'm thinking that chennai forgot to come for auction..
ham Then ü come n pick me at 530 ar?
ham Early bird! Any purchases yet?
ham Went to pay rent. So i had to go to the bank to authorise the payment.
ham Erm … ill pick you up at about 6.45pm. That'll give enough time to get there, park and that.
ham HEY MATE! HOWS U HONEY?DID U AVE GOOD HOLIDAY? GIMMI DE GOSS!x
ham Howz pain.it will come down today.do as i said ystrday.ice and medicine.
ham chile, please! It's only a <DECIMAL> hour drive for me. I come down all the time and will be subletting feb-april for audition season.
ham Yes ammae....life takes lot of turns you can only sit and try to hold the steering...
ham Yeah that's what I thought, lemme know if anything's goin on later
ham Mmmm.... I cant wait to lick it!
ham Pls go there today <#> . I dont want any excuses
spam Fantasy Football is back on your TV. Go to Sky Gamestar on Sky Active and play £250k Dream Team. Scoring starts on Saturday, so register now!SKY OPT OUT to 88088
ham Can you plz tell me the ans. BSLVYL sent via fullonsms.com
ham U in town alone?
ham I to am looking forward to all the sex cuddling.. Only two more sleeps
ham We have all rounder:)so not required:)
ham No, its true..k,Do u knw dis no. <#> ?
ham Dont worry, 1 day very big lambu ji vl come..til then enjoy batchlor party:-)
ham oh ya... Got hip hop open. Haha i was thinking can go for jazz then zoom to cine... Actually tonight i'm free leh... And there's a kb lesson tonight
spam Free msg: Single? Find a partner in your area! 1000s of real people are waiting to chat now!Send CHAT to 62220Cncl send STOPCS 08717890890£1.50 per msg
ham I'm ok. Will do my part tomorrow
ham No! But we found a diff farm shop to buy some cheese. On way back now, can i call in?
ham R u still working now?
spam Win the newest Harry Potter and the Order of the Phoenix (Book 5) reply HARRY, answer 5 questions - chance to be the first among readers!
ham Yep. I do like the pink furniture tho.
spam Free Msg: Ringtone!From: http://tms. widelive.com/index. wml?id=1b6a5ecef91ff9*37819&first=true18:0430-JUL-05
ham Customer place, i wil cal u sir.
spam Oh my god! I've found your number again! I'm so glad, text me back xafter this msgs cst std ntwk chg £1.50
ham A pure hearted person can have a wonderful smile that makes even his/her enemies to feel guilty for being an enemy.. So catch the world with your smile..:) GOODMORNING & HAVE A SMILEY SUNDAY..:)
ham THATS ALRITE GIRL, U KNOW GAIL IS NEVA WRONG!!TAKE CARE SWEET AND DONT WORRY.C U L8TR HUN!LOVE Yaxxx
ham Theoretically yeah, he could be able to come
ham Alright we're hooked up, where you guys at
ham not that I know of, most people up here are still out of town
ham No let me do the math. Your not good at it.
ham Oh ok wait 4 me there... My lect havent finish
ham Yeah my usual guy's out of town but there're definitely people around I know
ham I am joining today formally.Pls keep praying.will talk later.
ham Happy or sad , one thing about past is- "Its no more" GOOD MORNING :-):-).
ham No. Did you multimedia message them or e-mail?
ham Okie but i scared u say i fat... Then u dun wan me already...
ham did u get that message
ham Sorry sir, i will call you tomorrow. senthil.hsbc
ham What you need. You have a person to give na.
ham She left it very vague. She just said she would inform the person in accounting about the delayed rent and that i should discuss with the housing agency about my renting another place. But checking online now and all places around usc are <#> and up
ham Hi juan. Im coming home on fri hey. Of course i expect a welcome party and lots of presents. Ill phone u when i get back. Loads of love nicky x x x x x x x x x
ham Can you plz tell me the ans. BSLVYL sent via fullonsms.com
ham Short But Cute: "Be a good person, but dont try to prove it.." .Gud noon....
ham Gumby's has a special where a <#> " cheese pizza is $2 so I know what we're doin tonight
spam A link to your picture has been sent. You can also use http://alto18.co.uk/wave/wave.asp?o=44345
ham Like a personal sized or what
ham Same, I'm at my great aunts anniversary party in tarpon springs
ham Cab is available.they pick up and drop at door steps.
ham ok....take care.umma to you too...
ham Unlimited texts. Limited minutes.
spam Double Mins & 1000 txts on Orange tariffs. Latest Motorola, SonyEricsson & Nokia with Bluetooth FREE! Call MobileUpd8 on 08000839402 or call2optout/HF8
ham No problem. We will be spending a lot of quality time together...
spam URGENT This is our 2nd attempt to contact U. Your £900 prize from YESTERDAY is still awaiting collection. To claim CALL NOW 09061702893. ACL03530150PM
ham Have you heard from this week?
spam Dear Dave this is your final notice to collect your 4* Tenerife Holiday or #5000 CASH award! Call 09061743806 from landline. TCs SAE Box326 CW25WX 150ppm
ham Yes. Last practice
spam tells u 2 call 09066358152 to claim £5000 prize. U have 2 enter all ur mobile & personal details @ the prompts. Careful!
ham No. Thank you. You've been wonderful
ham Otherwise had part time job na-tuition..
ham Ü mean it's confirmed... I tot they juz say oni... Ok then...
ham Okie
ham That depends. How would you like to be treated? :)
ham Right on brah, see you later
ham Waiting in e car 4 my mum lor. U leh? Reach home already?
spam Your 2004 account for 07XXXXXXXXX shows 786 unredeemed points. To claim call 08719181259 Identifier code: XXXXX Expires 26.03.05
spam Do you want a new video handset? 750 anytime any network mins? Half Price Line Rental? Camcorder? Reply or call 08000930705 for delivery tomorrow
ham Went fast asleep dear.take care.
ham No that just means you have a fat head
ham Sounds like a plan! Cardiff is still here and still cold! I'm sitting on the radiator!
ham Serious? What like proper tongued her
ham She.s good. She was wondering if you wont say hi but she.s smiling now. So how are you coping with the long distance
ham How i noe... She's in da car now... Later then c lar... I'm wearing shorts...
spam You have an important customer service announcement. Call FREEPHONE 0800 542 0825 now!
ham Yeah whatever lol
ham Today is ACCEPT DAY..U Accept me as? Brother Sister Lover Dear1 Best1 Clos1 Lvblefrnd Jstfrnd Cutefrnd Lifpartnr Belovd Swtheart Bstfrnd No rply means enemy
ham Ard 530 lor. I ok then message ü lor.
ham Ok. C u then.
ham Eh ur laptop got no stock lei... He say mon muz come again to take a look c got a not...
ham No need to ke qi... Ü too bored izzit y suddenly thk of this...
ham I wish! I don't think its gonna snow that much. But it will be more than those flurries we usually get that melt before they hit the ground. Eek! We haven't had snow since <#> before I was even born!
spam FREE>Ringtone! Reply REAL or POLY eg REAL1 1. PushButton 2. DontCha 3. BabyGoodbye 4. GoldDigger 5. WeBeBurnin 1st tone FREE and 6 more when u join for £3/wk
ham Do 1 thing! Change that sentence into: "Because i want 2 concentrate in my educational career im leaving here.."
ham Oh really? perform, write a paper, go to a movie AND be home by midnight, huh?
ham Okay lor... Will they still let us go a not ah? Coz they will not know until later. We drop our cards into the box right?
ham How? Izzit still raining?
ham As if i wasn't having enough trouble sleeping.
ham I havent add ü yet right..
ham Lol ... I really need to remember to eat when I'm drinking but I do appreciate you keeping me company that night babe *smiles*
ham Babe ? I lost you ... Will you try rebooting ?
ham Yes. Nigh you cant aha.
ham I thk ü gotta go home by urself. Cos i'll b going out shopping 4 my frens present.
ham Nooooooo I'm gonna be bored to death all day. Cable and internet outage.
ham Sos! Any amount i can get pls.
ham Playin space poker, u?
ham How come guoyang go n tell her? Then u told her?
ham You need to get up. Now.
ham They r giving a second chance to rahul dengra.
ham Yeah, in fact he just asked if we needed anything like an hour ago. When and how much?
ham WHEN THE FIRST STRIKE IS A RED ONE. THE BIRD + ANTELOPE BEGIN TOPLAY IN THE FIELDOF SELFINDEPENDENCE BELIEVE THIS + THE FLOWER OF CONTENTION WILL GROW.RANDOM!
ham Y ü wan to go there? C doctor?
ham Does daddy have a bb now.
spam Free Msg: get Gnarls Barkleys "Crazy" ringtone TOTALLY FREE just reply GO to this message right now!
ham She's borderline but yeah whatever.
ham I got a call from a landline number. . . I am asked to come to anna nagar . . . I will go in the afternoon
ham Until 545 lor... Ya, can go 4 dinner together...
ham I will be gentle princess! We will make sweet gentle love...
ham How u doin baby girl ?? hope u are okay every time I call ure phone is off! I miss u get in touch
ham Sorry, went to bed early, nightnight
ham I like to think there's always the possibility of being in a pub later.
ham HMM yeah if your not too grooved out! And im looking forward to my pound special :)
ham I got to video tape pple type in message lor. U so free wan 2 help me? Hee... Cos i noe u wan 2 watch infernal affairs so ask u along. Asking shuhui oso.
ham Hi dude hw r u da realy mising u today
ham Me hungry buy some food good lei... But mum n yun dun wan juz buy a little bit...
spam Refused a loan? Secured or Unsecured? Can't get credit? Call free now 0800 195 6669 or text back 'help' & we will!
ham I probably won't eat at all today. I think I'm gonna pop. How was your weekend? Did u miss me?
ham I knew it... U slept v late yest? Wake up so late...
ham Haha... dont be angry with yourself... Take it as a practice for the real thing. =)
ham Where is that one day training:-)
ham So i could kiss and feel you next to me...
ham Have a nice day my dear.
ham I sent lanre fakeye's Eckankar details to the mail box
ham Your dad is back in ph?
spam You have been specially selected to receive a "3000 award! Call 08712402050 BEFORE the lines close. Cost 10ppm. 16+. T&Cs apply. AG Promo
ham If you ask her or she say any please message.
ham If e timing can, then i go w u lor...
ham Love you aathi..love u lot..
ham I was just callin to say hi. Take care bruv!
spam YOU HAVE WON! As a valued Vodafone customer our computer has picked YOU to win a £150 prize. To collect is easy. Just call 09061743386
ham Did u turn on the heater? The heater was on and set to <#> degrees.
ham Thanks for your message. I really appreciate your sacrifice. I'm not sure of the process of direct pay but will find out on my way back from the test tomorrow. I'm in class now. Do have a wonderful day.
ham That's the trouble with classes that go well - you're due a dodgey one … Expecting mine tomo! See you for recovery, same time, same place
spam Free video camera phones with Half Price line rental for 12 mths and 500 cross ntwk mins 100 txts. Call MobileUpd8 08001950382 or Call2OptOut/674&
ham WOT U UP 2 J?
ham Night night, see you tomorrow
ham Roger that. We‘re probably going to rem in about 20
ham do u think that any girl will propose u today by seing ur bloody funky shit fucking face...............asssssholeeee................
ham I wish u were here. I feel so alone
spam Great NEW Offer - DOUBLE Mins & DOUBLE Txt on best Orange tariffs AND get latest camera phones 4 FREE! Call MobileUpd8 free on 08000839402 NOW! or 2stoptxt T&Cs
ham Reason is if the team budget is available at last they buy the unsold players for at base rate..
ham CERI U REBEL! SWEET DREAMZ ME LITTLE BUDDY!! C YA 2MORO! WHO NEEDS BLOKES
spam ringtoneking 84484
ham Huh i cant thk of more oredi how many pages do we have?
ham His frens go then he in lor. Not alone wif my mum n sis lor.
ham Nationwide auto centre (or something like that) on Newport road. I liked them there
ham Hey, I missed you tm of last night as my phone was on the charge ... *smiles* ... I am meeting a friend shortly
ham Whatever, juliana. Do whatever you want.
ham Friendship is not a game to play, It is not a word to say, It doesn\'t start on March and ends on May, It is tomorrow, yesterday, today and e
spam Ringtone Club: Gr8 new polys direct to your mobile every week !
ham Hello. Sort of out in town already. That . So dont rush home, I am eating nachos. Will let you know eta.
ham Ok lor. Anyway i thk we cant get tickets now cos like quite late already. U wan 2 go look 4 ur frens a not? Darren is wif them now...
spam (Bank of Granite issues Strong-Buy) EXPLOSIVE PICK FOR OUR MEMBERS *****UP OVER 300% *********** Nasdaq Symbol CDGT That is a $5.00 per..
ham I am on the way to ur home
ham Dizzamn, aight I'll ask my suitemates when I get back
ham Nimbomsons. Yep phone knows that one. Obviously, cos thats a real word
ham I love to cuddle! I want to hold you in my strong arms right now...
ham R u in this continent?
ham We'll you pay over like <#> yrs so its not too difficult
spam Bored housewives! Chat n date now! 0871750.77.11! BT-national rate 10p/min only from landlines!
spam We tried to call you re your reply to our sms for a video mobile 750 mins UNLIMITED TEXT free camcorder Reply or call now 08000930705 Del Thurs
ham K...k...when will you give treat?
spam This is the 2nd time we have tried to contact u. U have won the £400 prize. 2 claim is easy, just call 087104711148 NOW! Only 10p per minute. BT-national-rate
ham He's just gonna worry for nothing. And he won't give you money its no use.
ham Did you get any gift? This year i didnt get anything. So bad
ham somewhere out there beneath the pale moon light someone think in of u some where out there where dreams come true... goodnite & sweet dreams
ham Well there's a pattern emerging of my friends telling me to drive up and come smoke with them and then telling me that I'm a weed fiend/make them smoke too much/impede their doing other things so you see how I'm hesitant
ham , ow u dey.i paid 60,400thousad.i told u would call .
ham IM FINE BABES AINT BEEN UP 2 MUCH THO! SAW SCARY MOVIE YEST ITS QUITE FUNNY! WANT 2MRW AFTERNOON? AT TOWN OR MALL OR SUMTHIN?xx
ham I'm reaching home in 5 min.
ham Forgot you were working today! Wanna chat, but things are ok so drop me a text when you're free / bored etc and i'll ring. Hope all is well, nose essay and all xx
ham Ha... Then we must walk to everywhere... Cannot take tram. My cousin said can walk to vic market from our hotel
spam Wan2 win a Meet+Greet with Westlife 4 U or a m8? They are currently on what tour? 1)Unbreakable, 2)Untamed, 3)Unkempt. Text 1,2 or 3 to 83049. Cost 50p +std text
spam Please call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!
ham Discussed with your mother ah?
ham Ok.
ham Sorry, I can't text & drive coherently, see you in twenty
spam You will be receiving this week's Triple Echo ringtone shortly. Enjoy it!
ham In which place i can get rooms cheap:-)
ham Eek that's a lot of time especially since American Pie is like 8 minutes long. I can't stop singing it.
ham "GRAN ONLYFOUND OUT AFEW DAYS AGO.CUSOON HONI"
spam U've been selected to stay in 1 of 250 top British hotels - FOR NOTHING! Holiday valued at £350! Dial 08712300220 to claim - National Rate Call. Bx526, SW73SS
ham University of southern california.
ham We have to pick rayan macleran there.
ham U gd lor go shopping i got stuff to do. U wan 2 watch infernal affairs a not? Come lar...
ham Well. Balls. Time to make calls
ham Wat time ü wan today?
ham <#> in mca. But not conform.
ham Oh ok.. Wat's ur email?
ham Yes, princess. Are you going to make me moan?
ham Lol its ok I didn't remember til last nite
ham […] anyway, many good evenings to u! s
ham Cool, I'll text you in a few
ham Sorry vikky, i'm Watching olave mandara movie kano in trishul theatre wit my frnds..
ham I'm very happy for you babe ! Woo hoo party on dude!
ham I am taking you for italian food. How about a pretty dress with no panties? :)
ham Wot u up 2? Thout u were gonna call me!! Txt bak luv K
spam YOU ARE CHOSEN TO RECEIVE A £350 AWARD! Pls call claim number 09066364311 to collect your award which you are selected to receive as a valued mobile customer.
ham How are you holding up?
ham Dont flatter yourself... Tell that man of mine two pints of carlin in ten minutes please....
ham Hope you are not scared!
ham I cant pick the phone right now. Pls send a message
ham I'm at home n ready...
spam Please call our customer service representative on FREEPHONE 0808 145 4742 between 9am-11pm as you have WON a guaranteed £1000 cash or £5000 prize!
ham What time do u get out?
ham I am literally in bed and have been up for like <#> hours
ham Yes, my reg is Ciao!
ham If You mean the website. Yes.
spam Win a £1000 cash prize or a prize worth £5000
spam Thanks for your ringtone order, reference number X49.Your mobile will be charged 4.50. Should your tone not arrive please call customer services 09065989182
ham Lol or I could just starve and lose a pound by the end of the day.
ham Yeah that's the impression I got
ham Ok ok take care. I can understand.
ham Motivate Behind every darkness, there is a shining light waiting for you to find it... Behind every best friend, there is always trust and love... BSLVYL
ham Ya ok, then had dinner?
ham I was slept that time.you there?
ham dont make ne plans for nxt wknd coz she wants us to come down then ok
ham When is school starting. Where will you stay. What's the weather like. And the food. Do you have a social support system like friends in the school. All these things are important.
ham Ha ha nan yalrigu heltini..Iyo kothi chikku, u shared many things wit me..so far i didn't told any body and even uttered a word abt u.. If ur trusting me so much how can i tell these to others.. Plz nxt time dont use those words to me..ok, chikku:-);-)B-)
ham Noice. Text me when you're here
ham Hi di is yijue we're meeting at 7 pm at esaplanade tonight.
spam Moby Pub Quiz.Win a £100 High Street prize if u know who the new Duchess of Cornwall will be? Txt her first name to 82277.unsub STOP £1.50 008704050406 SP
spam This weeks SavaMob member offers are now accessible. Just call 08709501522 for details! SavaMob, POBOX 139, LA3 2WU. Only £1.50/week. SavaMob - offers mobile!
ham Aight I've been set free, think you could text me blake's address? It occurs to me I'm not quite as sure what I'm doing as I thought I was
ham Hi dear we saw dear. We both are happy. Where you my battery is low
ham How are you. Its been ages. How's abj
ham Prof: you have passed in all the papers in this sem congrats . . . . Student: Enna kalaachutaarama..!! Prof:???? Gud mrng!
ham Dont kick coco when he's down
ham Fyi I'm gonna call you sporadically starting at like <#> bc we are not not doin this shit
spam You are being contacted by our Dating Service by someone you know! To find out who it is, call from your mobile or landline 09064017305 PoBox75LDNS7
spam TBS/PERSOLVO. been chasing us since Sept for£38 definitely not paying now thanks to your information. We will ignore them. Kath. Manchester.
ham Hope youre not having too much fun without me!! see u tomorrow love jess x
ham Ok i wont call or disturb any one. I know all are avoiding me. I am a burden for all
ham I've reached home n i bathe liao... U can call me now...
spam Loans for any purpose even if you have Bad Credit! Tenants Welcome. Call NoWorriesLoans.com on 08717111821
ham Was the actual exam harder than NBME
ham A lot of this sickness thing going round. Take it easy. Hope u feel better soon. Lol
ham God picked up a flower and dippeditinaDEW, lovingly touched itwhichturnedinto u, and the he gifted tomeandsaid,THIS FRIEND IS 4U
spam 87077: Kick off a new season with 2wks FREE goals & news to ur mobile! Txt ur club name to 87077 eg VILLA to 87077
ham Hey sathya till now we dint meet not even a single time then how can i saw the situation sathya.
ham Gam gone after outstanding innings.
ham O i played smash bros <#> religiously.
ham Sir, good morning. Hope you had a good weekend. I called to let you know that i was able to raise <#> from my dad. He however said he would make the rest available by mid feb. This amount is still quite short and i was hoping you would help. Do have a good day. Abiola
ham Hurry home. Soup is DONE!
ham No no. I will check all rooms befor activities
ham Good afternoon, my love. It was good to see your words on YM and get your tm. Very smart move, my slave ... *smiles* ... I drink my coffee and await you.
ham Quite ok but a bit ex... U better go eat smth now else i'll feel guilty...
spam Orange brings you ringtones from all time Chart Heroes, with a free hit each week! Go to Ringtones & Pics on wap. To stop receiving these tips reply STOP.
ham Lemme know when you're here
spam PRIVATE! Your 2003 Account Statement for 07973788240 shows 800 un-redeemed S. I. M. points. Call 08715203649 Identifier Code: 40533 Expires 31/10/04
ham He needs to stop going to bed and make with the fucking dealing
ham How are you, my Love ? Are you with your brother ? Time to talk english with him ? *grins* Say : Hey Muhommad, Penny says hello from across the sea
spam We tried to call you re your reply to our sms for a video mobile 750 mins UNLIMITED TEXT + free camcorder Reply of call 08000930705 Now
ham Hey doc pls I want to get nice t shirt for my hubby nice fiting ones my budget is <#> k help pls I will load d card abi hw,keep me posted luv. 2 mj
ham I remain unconvinced that this isn't an elaborate test of my willpower
ham "Life is nothing wen v get everything". But "life is everything wen v miss something ". Real value of people wil be realized only in their absence.... gud mrng
ham how are you? I miss you!
ham I ain't answerin no phone at what is actually a pretty reasonable hour but I'm sleepy
ham Hey , is * rite u put »10 evey mnth is that all?
ham i am going to bed now prin
ham I think just yourself …Thanks and see you tomo
ham If u dun drive then how i go 2 sch.
ham I not at home now lei...
spam GSOH? Good with SPAM the ladies?U could b a male gigolo? 2 join the uk's fastest growing mens club reply ONCALL. mjzgroup. 08714342399.2stop reply STOP. msg@£1.50rcvd
ham Ok then i will come to ur home after half an hour
spam U have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094599
ham Do u hav any frnd by name ashwini in ur college?
ham Jus finish my lunch on my way home lor... I tot u dun wan 2 stay in sch today...
ham K then 2marrow are you coming to class.
spam HOT LIVE FANTASIES call now 08707500020 Just 20p per min NTT Ltd, PO Box 1327 Croydon CR9 5WB 0870 is a national rate call
ham Pls send me your address sir.
ham I want to lick your pussy now...
ham Yo, you gonna still be in stock tomorrow/today? I'm trying to get a dubsack
spam URGENT! Your Mobile number has been awarded a <UKP>2000 prize GUARANTEED. Call 09061790125 from landline. Claim 3030. Valid 12hrs only 150ppm
ham I'll see, but prolly yeah
ham Thought we could go out for dinner. I'll treat you! Seem ok?
ham Where are you ? What do you do ? How can you stand to be away from me ? Doesn't your heart ache without me ? Don't you wonder of me ? Don't you crave me ?
ham Sorry. You never hear unless you book it. One was kinda a joke--thet were really looking for skinny white girls. The other was one line--you can only do so much on camera with that. Something like that they're casting on the look.
ham What you doing?how are you?
ham Sure thing big man. i have hockey elections at 6, shouldn‘t go on longer than an hour though
ham Watch lor. I saw a few swatch one i thk quite ok. Ard 116 but i need 2nd opinion leh...
ham Hiya do u like the hlday pics looked horrible in them so took mo out! Hows the camp Amrca thing? Speak soon Serena:)
ham Babe! How goes that day ? What are you up to ? I miss you already, my Love ... * loving kiss* ... I hope everything goes well.
ham Yunny... I'm goin to be late
ham Doc prescribed me morphine cause the other pain meds aren't enough. Waiting for my mom to bring it. That med should kick in fast so I'm gonna try to be on later
ham Cool, want me to go to kappa or should I meet you outside mu
ham Hey sexy buns ! Have I told you ? I adore you, loverboy. I hope you remember to thank your sister in law for those meatballs *grins* ... i love you, babe
ham May b approve panalam...but it should have more posts..
spam SPJanuary Male Sale! Hot Gay chat now cheaper, call 08709222922. National rate from 1.5p/min cheap to 7.8p/min peak! To stop texts call 08712460324 (10p/min)
ham Sorry, I'll call later
ham I dont thnk its a wrong calling between us
ham Me i'm not workin. Once i get job...
ham And by when you're done I mean now
ham "Its Ur luck to Love someone. Its Ur fortune to Love the one who Loves U. But, its a miracle to Love a person who can't Love anyone except U..." Gud nyt...
ham Hi baby ive just got back from work and i was wanting to see u allday! I hope i didnt piss u off on the phone today. If u are up give me a call xxx
spam FreeMsg Today's the day if you are ready! I'm horny & live in your town. I love sex fun & games! Netcollex Ltd 08700621170150p per msg reply Stop to end
ham Is it your yahoo boys that bring in the perf? Or legal.
ham No need to say anything to me. I know i am an outsider
ham have you ever had one foot before?
ham Just got to <#>
ham Good! No, don‘t need any receipts—well done! (…) Yes, please tell . What‘s her number, i could ring her
ham Ever green quote ever told by Jerry in cartoon "A Person Who Irritates u Always Is the one Who Loves u Vry Much But Fails to Express It...!..!! :-) :-) gud nyt
ham Leave it wif me lar... Ü wan to carry meh so heavy... Is da num 98321561 familiar to ü?
ham Beautiful truth : Expression of the face could Be seen by everyone... But the depression of heart Could be understood only By the Loved ones.. Gud Ni8;-)
ham Infact happy new year. How are you where are you when are we seeing
spam In The Simpsons Movie released in July 2007 name the band that died at the start of the film? A-Green Day, B-Blue Day, C-Red Day. (Send A, B or C)
ham That's a shame! Maybe cld meet for few hrs tomo?
ham Lol I would but despite these cramps I like being a girl.
ham I cant wait for cornwall. Hope tonight isnt too bad as well but its rock night shite. Anyway im going for a kip now have a good night. Speak to you soon.
ham Pls help me tell sura that i'm expecting a battery from hont. And that if should pls send me a message about how to download movies. Thanks
spam Please call Amanda with regard to renewing or upgrading your current T-Mobile handset free of charge. Offer ends today. Tel 0845 021 3680 subject to T's and C's
ham Haven't found a way to get another app for your phone, eh ? Will you go to the net cafe ? Did you take that job? Geeee I need you babe. I crave to see you ...
ham I only work from mon to thurs but Sat i cant leh... Booked liao... Which other day u free?
ham Ü comin to fetch us oredi...
ham What's nannys address?
spam URGENT!! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TC s, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins
ham Haf u eaten? Wat time u wan me 2 come?
spam Want a new Video Phone? 750 anytime any network mins? Half price line rental free text for 3 months? Reply or call 08000930705 for free delivery
ham Yo, call me when you get the chance, a friend of mine wanted me to ask you about a big order
ham This single single answers are we fighting? Plus i said am broke and you didnt reply
ham It certainly puts things into perspective when something like this happens
ham Now got tv 2 watch meh? U no work today?
ham i felt so...not any conveying reason.. Ese he... What about me?
spam Had your mobile 11 months or more? U R entitled to Update to the latest colour mobiles with camera for Free! Call The Mobile Update Co FREE on 08002986030
ham How's it going? Got any exciting karaoke type activities planned? I'm debating whether to play football this eve. Feeling lazy though.
ham I told that am coming on wednesday.
ham Its ok, called mom instead have fun
spam Dear Voucher Holder, To claim this weeks offer, at your PC please go to http://www.wtlp.co.uk/text. Ts&Cs apply.
ham Well if I'm that desperate I'll just call armand again
ham Are you at work right now ?
spam Congrats! Nokia 3650 video camera phone is your Call 09066382422 Calls cost 150ppm Ave call 3mins vary from mobiles 16+ Close 300603 post BCM4284 Ldn WC1N3XX
ham Haven't heard anything and he's not answering my texts so I'm guessing he flaked. That said the jb is fantastic
ham Mmmmmm ... I love you,so much, Ahmad ... I can't wait for this year to begin as every second takes me closer to being at your side. Happy New Year, my love!!
ham Pls what's the full name of joke's school cos fees in university of florida seem to actually be <#> k. Pls holla back
ham Sorry, I'll call later
ham Ok... But they said i've got wisdom teeth hidden inside n mayb need 2 remove.
ham And pls pls drink plenty plenty water
ham How are you doing. How's the queen. Are you going for the royal wedding
ham He's in lag. That's just the sad part but we keep in touch thanks to skype
ham Ok lor then we go tog lor...
ham Two teams waiting for some players
ham Can ü send me a copy of da report?
ham swhrt how u dey,hope ur ok, tot about u 2day.love n miss.take care.
ham Ok da, i already planned. I wil pick you.
spam Urgent! Please call 0906346330. Your ABTA complimentary 4* Spanish Holiday or £10,000 cash await collection SAE T&Cs BOX 47 PO19 2EZ 150ppm 18+
ham Sorry, I'll call later in meeting
ham I just really need shit before tomorrow and I know you won't be awake before like 6
ham I'm good. Have you registered to vote?
ham Hmm ok, i'll stay for like an hour cos my eye is really sore!
ham Dear got bus directly to calicut
ham Mm umma ask vava also to come tell him can play later together
ham Well the general price is <#> /oz, let me know if/when/how much you want
ham Sorry, I'll call later
ham Each Moment in a day,has its own value-Morning brings hope,afternoon brings faith,Evening brings luv,Night brings rest,Wish u find them all today.Good Morning
ham <#> w jetton ave if you forgot
ham Ok i'm coming home now.
ham Can not use foreign stamps in this country.
spam Double mins and txts 4 6months FREE Bluetooth on Orange. Available on Sony, Nokia Motorola phones. Call MobileUpd8 on 08000839402 or call2optout/N9DX
ham Sorry, it's a lot of friend-of-a-friend stuff, I'm just now about to talk to the actual guy who wants to buy
spam FREE for 1st week! No1 Nokia tone 4 ur mob every week just txt NOKIA to 8007 Get txting and tell ur mates www.getzed.co.uk POBox 36504 W45WQ norm150p/tone 16+
spam Want to funk up ur fone with a weekly new tone reply TONES2U 2 this text. www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply
spam cmon babe, make me horny, *turn* me on! Txt me your fantasy now babe -) Im hot, sticky and need you now. All replies cost £1.50. 2 cancel send STOP
ham I will come tomorrow di
ham Wylie update: my weed dealer carlos went to freedom and had a class with lunsford
ham Are you happy baby ? Are you alright ? Did you take that job ? I hope your fine. I send you a kiss to make you smile from across the sea ... *kiss* *kiss*
ham C movie is juz last minute decision mah. Juz watch 2 lar but i tot ü not interested.
ham How are you enjoying this semester? Take care brother.
spam IMPORTANT INFORMATION 4 ORANGE USER 0796XXXXXX. TODAY IS UR LUCKY DAY!2 FIND OUT WHY LOG ONTO http://www.urawinner.com THERE'S A FANTASTIC PRIZEAWAITING YOU!
ham Get the door, I'm here
ham Lets use it next week, princess :)
ham Or i go home first lar ü wait 4 me lor.. I put down my stuff first..
ham I want kfc its Tuesday. Only buy 2 meals ONLY 2. No gravy. Only 2 Mark. 2!
ham No da:)he is stupid da..always sending like this:)don believe any of those message.pandy is a mental:)
ham Oi when you gonna ring
spam Missed call alert. These numbers called but left no message. 07008009200
ham I attended but nothing is there.
ham Ard 530 like dat lor. We juz meet in mrt station then ü dun haf to come out.
ham No dear i was sleeping :-P
ham Er mw im filled tuth is aight
ham Will be office around 4 pm. Now i am going hospital.
ham Actually i'm waiting for 2 weeks when they start putting ad.
ham Anything lor if they all go then i go lor...
ham U free on sat rite? U wan 2 watch infernal affairs wif me n darren n mayb xy?
ham Plz note: if anyone calling from a mobile Co. & asks u to type # <#> or # <#> . Do not do so. Disconnect the call,coz it iz an attempt of 'terrorist' to make use of the sim card no. Itz confirmd by nokia n motorola n has been verified by CNN IBN.
ham Yo you around? A friend of mine's lookin to pick up later tonight
ham Stupid auto correct on my phone
ham Double eviction this week - Spiral and Michael and good riddance to them!
ham "The world suffers a lot... Not because of the violence of bad people. But because of the silence of good people!", Gud night....
ham Ok thats cool. Its , just off either raglan rd or edward rd. Behind the cricket ground. Gimme ring when ur closeby see you tuesday.
ham Buy one egg for me da..please:)
ham Have you started in skye
ham Have you bookedthe hut? And also your time off? How are you by the way?
ham And several to you sir.
ham U really pig leh sleep so much. My dad wake me up at 10 smth 2 eat lunch today.
ham I'm at home. Please call
ham My love ... I hope your not doing anything drastic. Don't you dare sell your pc or your phone ...
ham Now only i reached home. . . I am very tired now. . I will come tomorro
spam FREEMSG: Our records indicate you may be entitled to 3750 pounds for the Accident you had. To claim for free reply with YES to this msg. To opt out text STOP
spam U can WIN £100 of Music Gift Vouchers every week starting NOW Txt the word DRAW to 87066 TsCs www.Idew.com SkillGame, 1Winaweek, age16. 150ppermessSubscription
ham Life style garments account no please.
ham Lol wtf random. Btw is that your lunch break
ham Sez, hows u & de arab boy? Hope u r all good give my love 2 evry1 love ya eshxxxxxxxxxxx
ham The LAY MAN! Just to let you know you are missed and thought off. Do have a great day. And if you can send me bimbo and ugo's numbers, ill appreciate. Safe
ham Detroit. The home of snow. Enjoy it.
spam Show ur colours! Euro 2004 2-4-1 Offer! Get an England Flag & 3Lions tone on ur phone! Click on the following service message for info!
ham Okie...
ham Aight, I'm chillin in a friend's room so text me when you're on the way
ham Is toshiba portege m100 gd?
ham Well welp is sort of a semiobscure internet thing
spam Text PASS to 69669 to collect your polyphonic ringtones. Normal gprs charges apply only. Enjoy your tones
spam accordingly. I repeat, just text the word ok on your mobile phone and send
ham Loosu go to hospital. De dont let it careless.
ham How much for an eighth?
ham Omg Joanna is freaking me out. She's looked thru all my friends to find photos of me. And then she's asking about stuff on my MySpace which I haven't even logged on in like a year. :/
ham Send ur birthdate with month and year, I will tel u ur LIFE PARTNER'S name. and the method of calculation. Reply must.
ham Juz now havent woke up so a bit blur blur... Can? Dad went out liao... I cant cum now oso...
ham How about clothes, jewelry, and trips?
spam Block Breaker now comes in deluxe format with new features and great graphics from T-Mobile. Buy for just £5 by replying GET BBDELUXE and take the challenge
ham Aah! A cuddle would be lush! I'd need lots of tea and soup before any kind of fumbling!
spam important information 4 orange user . today is your lucky day!2find out why log onto http://www.urawinner.com THERE'S A FANTASTIC SURPRISE AWAITING YOU!
ham I am late. I will be there at
ham Sad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish.
ham Are you plans with your family set in stone ?
ham Pls dont forget to study
ham You'll never believe this but i have actually got off at taunton. Wow
ham Den only weekdays got special price... Haiz... Cant eat liao... Cut nails oso muz wait until i finish drivin wat, lunch still muz eat wat...
ham She just broke down a list of reasons why nobody's in town and I can't tell if she's being sarcastic or just faggy
ham <DECIMAL> m but its not a common car here so its better to buy from china or asia. Or if i find it less expensive. I.ll holla
ham The greatest test of courage on earth is to bear defeat without losing heart....gn tc
ham SORRY IM STIL FUCKED AFTER LAST NITE WENT TOBED AT 430 GOT UP 4 WORK AT 630
ham Hey so whats the plan this sat?
ham Beauty sleep can help ur pimples too.
ham Great. Hope you are using your connections from mode men also cos you can never know why old friends can lead you to today
spam Natalja (25/F) is inviting you to be her friend. Reply YES-440 or NO-440 See her: www.SMS.ac/u/nat27081980 STOP? Send STOP FRND to 62468
ham Where to get those?
ham Kind of. Just missed train cos of asthma attack, nxt one in half hr so driving in. not sure where to park.
ham Ball is moving a lot.will spin in last :)so very difficult to bat:)
ham Haiyoh... Maybe your hamster was jealous of million
ham Can you please send me my aunty's number
ham I'm glad. You are following your dreams.
ham I've reached home finally...
spam URGENT. Important information for 02 user. Today is your lucky day! 2 find out why , log onto http://www.urawinner.com there is a fantastic surprise awaiting you !
spam WINNER!! As a valued network customer you have been selected to receivea £900 prize reward! To claim call 09061701461. Claim code KL341. Valid 12 hours only.
ham Wn u r hurt by d prsn who s close 2 u, do fight wit dem. Coz somtimes dis fight saves a relation bt being quiet leaves nothin in a relation.. Gud eveB-)
ham U can call now...
ham Science tells that chocolate will melt under the sunlight. Please don't walk under the sunlight. BCoz,I don't want to loss a sweet friend.
ham Yes. I come to nyc for audiitions and am trying to relocate.
ham I pocked you up there before
ham Congrats. That's great. I wanted to tell you not to tell me your score cos it might make me relax. But its motivating me so thanks for sharing
ham I wud never mind if u dont miss me or if u dont need me.. But u wil really hurt me wen u need me & u dont tell me......... Take care:-)
ham Hey mr whats the name of that bill brison book the one about language and words
ham Okay, good, no problem, and thanx!
ham For you information, IKEA is spelled with all caps. That is not yelling. when you thought i had left you, you were sitting on the bed among the mess when i came in. i said we were going after you got home from class. please don't try and bullshit me. It makes me want to listen to you less.
ham Call me when u're done...
ham G.W.R
ham You best watch what you say cause I get drunk as a motherfucker
spam Kit Strip - you have been billed 150p. Netcollex Ltd. PO Box 1013 IG11 OJA
spam HMV BONUS SPECIAL 500 pounds of genuine HMV vouchers to be won. Just answer 4 easy questions. Play Now! Send HMV to 86688 More info:www.100percent-real.com
spam Please CALL 08712402578 immediately as there is an urgent message waiting for you
spam thesmszone.com lets you send free anonymous and masked messages..im sending this message from there..do you see the potential for abuse???
spam WELL DONE! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TCs, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins
ham Hurt me... Tease me... Make me cry... But in the end of my life when i die plz keep one rose on my grave and say STUPID I MISS U.. HAVE A NICE DAY BSLVYL
ham Erm... Woodland avenue somewhere. Do you get the parish magazine, his telephone number will be in there.
ham Are there TA jobs available? Let me know please cos i really need to start working
ham Aiyar hard 2 type. U later free then tell me then i call n scold n tell u.
ham Yup i'm free...
ham Good good, billy mates all gone. Just been jogging, again! Did enjoy concert?
ham Yo come over carlos will be here soon
ham Awww dat is sweet! We can think of something to do he he! Have a nice time tonight ill probably txt u later cos im lonely :( xxx.
ham I guess it is useless calling u 4 something important.
ham Ha ha - had popped down to the loo when you hello-ed me. Hello!
ham He dint tell anything. He is angry on me that why you told to abi.
spam Someone U know has asked our dating service 2 contact you! Cant Guess who? CALL 09058091854 NOW all will be revealed. PO BOX385 M6 6WU
ham It so happens that there r 2waxsto do wat you want. She can come and ill get her medical insurance. And she'll be able to deliver and have basic care. I'm currently shopping for the right medical insurance for her. So just give me til friday morning. Thats when i.ll see the major person that can guide me to the right insurance.
ham I keep ten rs in my shelf:) buy two egg.
ham I wasn't well babe, i have swollen glands at my throat ... What did you end up doing ?
ham Is ur changes 2 da report big? Cos i've already made changes 2 da previous report.
ham Captain is in our room:)
ham I can't speak, bcaz mobile have problem. I can listen you but you cann't listen my voice. So i calls you later.
ham HIYA STU WOT U UP 2.IM IN SO MUCH TRUBLE AT HOME AT MOMENT EVONE HATES ME EVEN U! WOT THE HELL AV I DONE NOW? Y WONT U JUST TELL ME TEXT BCK PLEASE LUV DAN
ham S...i will take mokka players only:)
ham Are you still playing with gautham?
ham Hey mr and I are going to the sea view and having a couple of gays I mean games! Give me a bell when ya finish
ham K, jason says he's gonna be around so I'll be up there around <#>
ham Sorry . I will be able to get to you. See you in the morning.
ham Aight well keep me informed
ham I am not having her number sir
ham Am only searching for good dual sim mobile pa.
ham That seems unnecessarily hostile
ham Dude got a haircut. Now its breezy up there
spam Congrats! 2 mobile 3G Videophones R yours. call 09061744553 now! videochat wid ur mates, play java games, Dload polyH music, noline rentl. bx420. ip4. 5we. 150pm
ham 1Apple/Day=No Doctor. 1Tulsi Leaf/Day=No Cancer. 1Lemon/Day=No Fat. 1Cup Milk/day=No Bone Problms 3 Litres Watr/Day=No Diseases Snd ths 2 Whom U Care..:-)
ham i thought we were doing a king of the hill thing there.
ham Nope i'll come online now..
ham ALSO TELL HIM I SAID HAPPY BIRTHDAY
ham Y bishan lei... I tot ü say lavender?
ham Boo what time u get out? U were supposed to take me shopping today. :(
ham Now u sound like manky scouse boy steve,like! I is travelling on da bus home.wot has u inmind 4 recreation dis eve?
ham Fyi I'm taking a quick shower, be at epsilon in like <#> min
ham on a Tuesday night r u 4 real
ham Yes when is the appt again?
ham Just got outta class gonna go gym.
ham I want to sent <#> mesages today. Thats y. Sorry if i hurts
ham Ü all write or wat..
ham Ha! I wouldn't say that I just didn't read anything into way u seemed. I don't like 2 be judgemental....i save that for fridays in the pub!
ham Its a valentine game. . . send dis msg to all ur friends. . If 5 answers r d same then someone really loves u. . Ques- which colour suits me the best?
ham Hi:)did you asked to waheeda fathima about leave?
ham Enjoy urself tmr...
ham You still around? I could use a half-8th
spam U 447801259231 have a secret admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09058094597
ham You give us back my id proof and <#> rs. We wont allow you to work. We will come to your home within days
ham Ü bot notes oredi... Cos i juz rem i got...
ham Yes. Rent is very expensive so its the way we save.
ham Night has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng
ham Hows the pain dear?y r u smiling?
ham Fun fact: although you would think armand would eventually build up a tolerance or some shit considering how much he smokes, he gets fucked up in like 2 hits
spam important information 4 orange user 0789xxxxxxx. today is your lucky day!2find out why log onto http://www.urawinner.com THERE'S A FANTASTIC SURPRISE AWAITING YOU!
ham Sorry, I can't help you on this.
ham Great. So should i send you my account number.
ham HELLOGORGEOUS, HOWS U? MY FONE WAS ON CHARGE LST NITW WEN U TEXD ME. HOPEU AD A NICE WKEND AS IM SURE U DID LOOKIN 4WARD 2 C-IN U 2MRW LUV JAZ
spam Our dating service has been asked 2 contact U by someone shy! CALL 09058091870 NOW all will be revealed. POBox84, M26 3UZ 150p
ham Ü only send me the contents page...
ham Night sweet, sleep well! I've just been to see The Exorcism of Emily Rose and may never sleep again! Hugs and snogs!
ham Don't Think About "What u Have Got" Think About "How to Use It That You Have Got" gooD ni8
ham I can't right this second, gotta hit people up first
ham Evry Emotion dsn't hav Words.Evry Wish dsn't hav Prayrs.. If u Smile,D World is wit u.Othrwise even d Drop of Tear dsn't lik 2 Stay wit u.So b happy.. Good morning, keep smiling:-)
ham So what about you. What do you remember
ham Ujhhhhhhh computer shipped out with address to sandiago and parantella lane. Wtf. Poop.
ham Mm yes dear look how i am hugging you both. :-P
ham I like dis sweater fr mango but no more my size already so irritating.
ham 1 I don't have her number and 2 its gonna be a massive pain in the ass and i'd rather not get involved if that's possible
ham Anytime lor...
spam Do you want a new Video handset? 750 any time any network mins? UNLIMITED TEXT? Camcorder? Reply or Call now 08000930705 for del Sat AM
ham Purity of friendship between two is not about smiling after reading the forwarded message..Its about smiling just by seeing the name. Gud evng
spam Ur balance is now £600. Next question: Complete the landmark, Big, A. Bob, B. Barry or C. Ben ?. Text A, B or C to 83738. Good luck!
ham Me fine..absolutly fine
ham K and you're sure I don't have to have consent forms to do it :V
spam Ur TONEXS subscription has been renewed and you have been charged £4.50. You can choose 10 more polys this month. www.clubzed.co.uk *BILLING MSG*
spam If you don't, your prize will go to another customer. T&C at www.t-c.biz 18+ 150p/min Polo Ltd Suite 373 London W1J 6HL Please call back if busy
ham How much is torch in 9ja.
ham Doing nothing, then u not having dinner w us?
ham How are you. Just checking up on you
ham Done it but internet connection v slow and can‘t send it. Will try again later or first thing tomo.
ham Mathews or tait or edwards or anderson
ham yeah sure thing mate haunt got all my stuff sorted but im going sound anyway promoting hex for .by the way who is this? dont know number. Joke
ham No need lar i go engin? Cos my sis at arts today...
ham Thanks honey but still haven't heard anything I will leave it a bit longer so not 2 crowd him and will try later - great advice thanks hope cardiff is still there!
spam Do you want a New Nokia 3510i Colour Phone Delivered Tomorrow? With 200 FREE minutes to any mobile + 100 FREE text + FREE camcorder Reply or Call 8000930705
ham , im .. On the snowboarding trip. I was wondering if your planning to get everyone together befor we go..a meet and greet kind of affair? Cheers,
ham S.i'm watching it in live..
ham see you then, we're all christmassy here!
ham K I'm ready, <#> ?
ham Do you know why god created gap between your fingers..? So that, One who is made for you comes & fills those gaps by holding your hand with LOVE..!
ham The greatest test of courage on earth is to bear defeat without losing heart....gn tc
ham what are your new years plans?
spam RECPT 1/3. You have ordered a Ringtone. Your order is being processed...
ham Baaaaaaaabe! Wake up ! I miss you ! I crave you! I need you!
ham Only just got this message, not ignoring you. Yes, i was. Shopping that is
ham Dear :-/ why you mood off. I cant drive so i brother to drive
ham When did dad get back.
ham Can you tell Shola to please go to college of medicine and visit the academic department, tell the academic secretary what the current situation is and ask if she can transfer there. She should ask someone to check Sagamu for the same thing and lautech. Its vital she completes her medical education in Nigeria. Its less expensive much less expensive. Unless she will be getting citizen rates in new zealand.
ham Yes just finished watching days of our lives. I love it.
ham Juz go google n search 4 qet...
ham Many times we lose our best ones bcoz we are
ham Good FRIENDS CaRE for each Other.. CLoSE Friends UNDERSTaND each Other... and TRUE Friends STaY forever beyond words, beyond time. Gud ni8
ham Just getting back home
ham Sorry, I'll call later <#> mins
ham Dun need to use dial up juz open da browser n surf...
spam As one of our registered subscribers u can enter the draw 4 a 100 G.B. gift voucher by replying with ENTER. To unsubscribe text STOP
ham Awesome, plan to get here any time after like <#> , I'll text you details in a wee bit
ham Take care and sleep well.you need to learn to change in life.you only need to get CONVINCED on that.i will wait but no more conversations between us.GET CONVINCED by that time.Your family is over for you in many senses.respect them but not overemphasise.or u have no role in my life.
spam For your chance to WIN a FREE Bluetooth Headset then simply reply back with "ADP"
ham You also didnt get na hi hi hi hi hi
ham Ya but it cant display internal subs so i gotta extract them
ham If i said anything wrong sorry de:-)
ham Sad story of a Man - Last week was my b'day. My Wife did'nt wish me. My Parents forgot n so did my Kids . I went to work. Even my Colleagues did not wish.
ham How stupid to say that i challenge god.You dont think at all on what i write instead you respond immed.
ham Yeah I should be able to, I'll text you when I'm ready to meet up
ham V skint too but fancied few bevies.waz gona go meet &othrs in spoon but jst bin watchng planet earth&sofa is v comfey; If i dont make it hav gd night
ham says that he's quitting at least5times a day so i wudn't take much notice of that. Nah, she didn't mind. Are you gonna see him again? Do you want to come to taunton tonight? U can tell me all about !
ham When you get free, call me
ham How have your little darlings been so far this week? Need a coffee run tomo?Can't believe it's that time of week already …
ham Ok i msg u b4 i leave my house.
ham Still at west coast... Haiz... Ü'll take forever to come back...
ham MMM ... Fuck .... Merry Christmas to me
ham alright. Thanks for the advice. Enjoy your night out. I'ma try to get some sleep...
ham Update your face book status frequently :)
ham Just now saw your message.it k da:)
ham Was it something u ate?
ham So what did the bank say about the money?
ham Aiyar dun disturb u liao... Thk u have lots 2 do aft ur cupboard come...
ham Hey they r not watching movie tonight so i'll prob b home early...
ham Yar lor... How u noe? U used dat route too?
ham 2mro i am not coming to gym machan. Goodnight.
ham Dont think you need yellow card for uk travel. Ask someone that has gone before. If you do its just <#> bucks
ham Can u look 4 me in da lib i got stuff havent finish yet.
ham Sounds great! Im going to sleep now. Have a good night!
spam Don't b floppy... b snappy & happy! Only gay chat service with photo upload call 08718730666 (10p/min). 2 stop our texts call 08712460324
ham House-Maid is the murderer, coz the man was murdered on <#> th January.. As public holiday all govt.instituitions are closed,including post office..understand?
ham How come u got nothing to do?
ham Nothing will ever be easy. But don't be looking for a reason not to take a risk on life and love
ham i want to grasp your pretty booty :)
ham I've got it down to a tea. not sure which flavour
ham I'm going 2 orchard now laready me reaching soon. U reaching?
ham Dear i am not denying your words please
ham You know my old Dom I told you about yesterday ? His name is Roger? He got in touch with me last night and wants me to meet him today at 2 pm
ham COME BACK TO TAMPA FFFFUUUUUUU
ham 2 celebrate my bday, y else?
ham Merry christmas to u too annie!
ham Please tell me you have some of that special stock you were talking about
ham I sent them. Do you like?
spam Urgent UR awarded a complimentary trip to EuroDisinc Trav, Aco&Entry41 Or £1000. To claim txt DIS to 87121 18+6*£1.50(moreFrmMob. ShrAcomOrSglSuplt)10, LS1 3AJ
ham Awesome, be there in a minute
ham And that is the problem. You walk around in "julianaland" oblivious to what is going on around you. I say the same things constantly and they go in one ear and out the other while you go off doing whatever you want to do. It's not that you don't know why I'm upset--it's that you don't listen when i tell you WHAT is going to upset me. Then you want to be surprised when I'm mad.
ham I've told you everything will stop. Just dont let her get dehydrated.
ham Or I guess <#> min
ham I'm home. Ard wat time will u reach?
ham Storming msg: Wen u lift d phne, u say "HELLO" Do u knw wt is d real meaning of HELLO?? . . . It's d name of a girl..! . . . Yes.. And u knw who is dat girl?? "Margaret Hello" She is d girlfrnd f Grahmbell who invnted telphone... . . . . Moral:One can 4get d name of a person, bt not his girlfrnd... G o o d n i g h t . . .@
ham If you want to mapquest it or something look up "usf dogwood drive", that's the tiny street where the parking lot is
ham Aight should I just plan to come up later tonight?
ham Die... I accidentally deleted e msg i suppose 2 put in e sim archive. Haiz... I so sad...
spam Welcome to UK-mobile-date this msg is FREE giving you free calling to 08719839835. Future mgs billed at 150p daily. To cancel send "go stop" to 89123
ham This is wishing you a great day. Moji told me about your offer and as always i was speechless. You offer so easily to go to great lengths on my behalf and its stunning. My exam is next friday. After that i will keep in touch more. Sorry.
ham Thanks again for your reply today. When is ur visa coming in. And r u still buying the gucci and bags. My sister things are not easy, uncle john also has his own bills so i really need to think about how to make my own money. Later sha.
ham Sorry I flaked last night, shit's seriously goin down with my roommate, what you up to tonight?
ham He said i look pretty wif long hair wat. But i thk he's cutting quite short 4 me leh.
ham Ranjith cal drpd Deeraj and deepak 5min hold
ham "CHEERS FOR CALLIN BABE.SOZI CULDNT TALKBUT I WANNATELL U DETAILS LATER WENWECAN CHAT PROPERLY X"
ham Hey u still at the gym?
ham She said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife,
ham Much better now thanks lol
ham Nothing, smsing u n xy lor. Sorry lor da guys neva c u in person but they sort of know u lor. So u wan 2 meet them xy ask me 2 bring u along 4 our next meeting.
ham Lemme know when I can swing by and pick up, I'm free basically any time after 1 all this semester
ham Wa... U so efficient... Gee... Thanx...
spam 3. You have received your mobile content. Enjoy
ham S but not able to sleep.
spam Want explicit SEX in 30 secs? Ring 02073162414 now! Costs 20p/min
ham We will meet soon princess! Ttyl!
ham I'll pick you up at about 5.15pm to go to taunton if you still want to come.
ham Oh :-)only 4 outside players allowed to play know
ham I anything lor.
ham Erutupalam thandiyachu
ham Y cant u try new invention to fly..i'm not joking.,
ham No..its ful of song lyrics..
ham What do u reckon as need 2 arrange transport if u can't do it, thanks
ham True lov n care wil nevr go unrecognized. though somone often makes mistakes when valuing it. but they will definitly undrstnd once when they start missing it.
ham Shopping? Eh ger i toking abt syd leh...Haha
ham What not under standing.
ham have * good weekend.
ham Miss call miss call khelate kintu opponenter miss call dhorte lage. Thats d rule. One with great phone receiving quality wins.
ham Call me when you get the chance plz <3
ham The new deus ex game comin early next yr
ham My computer just fried the only essential part we don't keep spares of because my fucking idiot roommates looovvve leaving the thing running on full <#> /7
ham My friend, she's studying at warwick, we've planned to go shopping and to concert tmw, but it may be canceled, havn't seen for ages, yeah we should get together sometime!
ham Probably a couple hours tops
ham LOL .. *grins* .. I'm not babe, but thanks for thinking of me!
ham Man this bus is so so so slow. I think you're gonna get there before me
ham Hope this text meets you smiling. If not then let this text give you a reason to smile. Have a beautiful day.
ham In case you wake up wondering where I am, I forgot I have to take care of something for grandma today, should be done before the parade
ham Ok
spam Latest Nokia Mobile or iPOD MP3 Player +£400 proze GUARANTEED! Reply with: WIN to 83355 now! Norcorp Ltd.£1,50/Mtmsgrcvd18+
spam SMS SERVICES. for your inclusive text credits, pls goto www.comuk.net login= 3qxj9 unsubscribe with STOP, no extra charge. help 08702840625.COMUK. 220-CM2 9AE
ham Nvm take ur time.
ham So wat's da decision?
ham Wot is u up 2 then bitch?
ham Stupid.its not possible
ham She told to hr that he want posting in chennai:)because i'm working here:)
spam Mobile Club: Choose any of the top quality items for your mobile. 7cfca1a
ham When are you guys leaving?
ham He neva grumble but i sad lor... Hee... Buy tmr lor aft lunch. But we still meetin 4 lunch tmr a not. Neva hear fr them lei. Ü got a lot of work ar?
ham Not able to do anything.
ham Ü takin linear algebra today?
ham This weekend is fine (an excuse not to do too much decorating)
ham Sorry I missed you babe. I was up late and slept in. I hope you enjoy your driving lesson, boytoy. I miss you too ... *teasing kiss*
ham Now project pa. After that only i can come.
spam Money i have won wining number 946 wot do i do next
ham Sure, whenever you show the fuck up >:(
ham That was random saw my old roomate on campus. He graduated
spam Congrats! 2 mobile 3G Videophones R yours. call 09061744553 now! videochat wid ur mates, play java games, Dload polyH music, noline rentl. bx420. ip4. 5we. 150pm
ham Men always needs a beautiful, intelligent, caring, loving, adjustable, cooperative wife. But the law allows only one wife....
ham That sucks. So what do you got planned for your yo valentine? I am your yo valentine aren't I?
ham Just got part Nottingham - 3 hrs 63miles. Good thing i love my man so much, but only doing 40mph. Hey ho
ham What to think no one saying clearly. Ok leave no need to ask her. I will go if she come or not
ham Hi good mornin.. Thanku wish u d same..
ham DO U WANT 2 MEET UP 2MORRO
ham Actually I decided I was too hungry so I haven't left yet :V
ham I've sent ü my part..
ham Cos i was out shopping wif darren jus now n i called him 2 ask wat present he wan lor. Then he started guessing who i was wif n he finally guessed darren lor.
spam I want some cock! My hubby's away, I need a real man 2 satisfy me. Txt WIFE to 89938 for no strings action. (Txt STOP 2 end, txt rec £1.50ea. OTBox 731 LA1 7WS. )
ham Understand. his loss is my gain :) so do you work? School?
ham HOW ARE U? I HAVE MISSED U! I HAVENT BEEN UP 2 MUCH A BIT BORED WITH THE HOLIDAY WANT 2 GO BAK 2 COLLEGE! SAD ISNT IT?xx
ham Hiya, probably coming home * weekend after next
ham Don't forget though that I love you .... And I walk beside you. Watching over you and keeping your heart warm.
ham I wish things were different. I wonder when i will be able to show you how much i value you. Pls continue the brisk walks no drugs without askin me please and find things to laugh about. I love you dearly.
ham Ok both our days. So what are you making for dinner tonite? Am I invited?
spam Gr8 new service - live sex video chat on your mob - see the sexiest dirtiest girls live on ur phone - 4 details text horny to 89070 to cancel send STOP to 89070
ham I have no money 4 steve mate! !
ham IM LATE TELLMISS IM ON MY WAY
ham Never blame a day in ur life. Good days give u happiness. Bad days give u experience. Both are essential in life! All are Gods blessings! good morning.:
ham Normally i use to drink more water daily:)
ham Dare i ask... Any luck with sorting out the car?
ham Party's at my place at usf, no charge (but if you can contribute in any way it is greatly appreciated) and yeah, we got room for one more
ham Urgh, coach hot, smells of chip fat! Thanks again, especially for the duvet (not a predictive text word).
ham Hiya. How was last night? I've been naughty and bought myself clothes and very little ... Ready for more shopping tho! What kind of time do you wanna meet?
spam FreeMsg Hi baby wow just got a new cam moby. Wanna C a hot pic? or Fancy a chat?Im w8in 4uTxt / rply CHAT to 82242 Hlp 08712317606 Msg150p 2rcv
ham I've been trying to reach him without success
ham when you and derek done with class?
ham Never y lei... I v lazy... Got wat? Dat day ü send me da url cant work one...
ham Never try alone to take the weight of a tear that comes out of ur heart and falls through ur eyes... Always remember a STUPID FRIEND is here to share... BSLVYL
ham Hey mate. Spoke to the mag people. We‘re on. the is deliver by the end of the month. Deliver on the 24th sept. Talk later.
ham Hope you are having a good week. Just checking in
ham Haha, my friend tyler literally just asked if you could get him a dubsack
ham "Hey! do u fancy meetin me at 4 at cha hav a lil beverage on me. if not txt or ring me and we can meet up l8r. quite tired got in at 3 v.pist ;) love Pete x x x"
ham Great. Have a safe trip. Dont panic surrender all.
ham "SYMPTOMS" when U are in love: "1.U like listening songs 2.U get stopped where u see the name of your beloved 3.U won't get angry when your
ham Sun ah... Thk mayb can if dun have anythin on... Thk have to book e lesson... E pilates is at orchard mrt u noe hor...
ham Try to do something dear. You read something for exams
ham 7 wonders in My WORLD 7th You 6th Ur style 5th Ur smile 4th Ur Personality 3rd Ur Nature 2nd Ur SMS and 1st "Ur Lovely Friendship"... good morning dear
ham Gettin rdy to ship comp
ham I am in hospital da. . I will return home in evening
ham PISS IS TALKING IS SOMEONE THAT REALISE U THAT POINT THIS AT IS IT.(NOW READ IT BACKWARDS)
ham Think + da. You wil do.
ham I'm awake oh. What's up.
ham Good afternoon my boytoy. How goes that walking here and there day ? Did you get that police abstract? Are you still out and about? I wake and miss you babe
ham How much u trying to get?
ham Come around <DECIMAL> pm vikky..i'm otside nw, il come by tht time
ham Tell me again what your address is
ham Honeybee Said: *I'm d Sweetest in d World* God Laughed & Said: *Wait,U Havnt Met d Person Reading This Msg* MORAL: Even GOD Can Crack Jokes! GM+GN+GE+GN:)
ham Should i buy him a blackberry bold 2 or torch. Should i buy him new or used. Let me know. Plus are you saying i should buy the <#> g wifi ipad. And what are you saying about the about the <#> g?
ham But you were together so you should be thinkin about him
ham hiya hows it going in sunny africa? hope u r avin a good time. give that big old silver back a big kiss from me.
ham At WHAT TIME should i come tomorrow
spam Wanna have a laugh? Try CHIT-CHAT on your mobile now! Logon by txting the word: CHAT and send it to No: 8883 CM PO Box 4217 London W1A 6ZF 16+ 118p/msg rcvd
ham "CHA QUITEAMUZING THATSCOOL BABE,PROBPOP IN & CU SATTHEN HUNNY 4BREKKIE! LOVE JEN XXX. PSXTRA LRG PORTIONS 4 ME PLEASE "
ham Omg how did u know what I ate?
spam "URGENT! This is the 2nd attempt to contact U!U have WON £1000CALL 09071512432 b4 300603t&csBCM4235WC1N3XX.callcost150ppmmobilesvary. max£7. 50"
ham :( but your not here....
ham Not directly behind... Abt 4 rows behind ü...
spam Congratulations ur awarded 500 of CD vouchers or 125gift guaranteed & Free entry 2 100 wkly draw txt MUSIC to 87066
spam Had your contract mobile 11 Mnths? Latest Motorola, Nokia etc. all FREE! Double Mins & Text on Orange tariffs. TEXT YES for callback, no to remove from records
spam Urgent! call 09066350750 from your landline. Your complimentary 4* Ibiza Holiday or 10,000 cash await collection SAE T&Cs PO BOX 434 SK3 8WP 150 ppm 18+
ham No plans yet. What are you doing ?
ham Hi ....My engagement has been fixd on <#> th of next month. I know its really shocking bt....hmm njan vilikkam....t ws al of a sudn;-(.
ham Not course. Only maths one day one chapter with in one month we can finish.
ham Wow didn't think it was that common. I take it all back ur not a freak! Unless u chop it off:-)
spam For ur chance to win a £250 wkly shopping spree TXT: SHOP to 80878. T's&C's www.txt-2-shop.com custcare 08715705022, 1x150p/wk
ham Noooooooo please. Last thing I need is stress. For once in your life be fair.
spam U have a Secret Admirer who is looking 2 make contact with U-find out who they R*reveal who thinks UR so special-call on 09065171142-stopsms-08718727870150ppm
spam Mila, age23, blonde, new in UK. I look sex with UK guys. if u like fun with me. Text MTALK to 69866.18 . 30pp/txt 1st 5free. £1.50 increments. Help08718728876
ham I'll see if I can swing by in a bit, got some things to take care of here firsg
ham I wanted to wish you a Happy New Year and I wanted to talk to you about some legal advice to do with when Gary and I split but in person. I'll make a trip to Ptbo for that. I hope everything is good with you babe and I love ya :)
ham Have you not finished work yet or something?
ham Tomorrow i am not going to theatre. . . So i can come wherever u call me. . . Tell me where and when to come tomorrow
spam Well done ENGLAND! Get the official poly ringtone or colour flag on yer mobile! text TONE or FLAG to 84199 NOW! Opt-out txt ENG STOP. Box39822 W111WX £1.50
ham Right it wasnt you who phoned it was someone with a number like yours!
ham It's ok i wun b angry. Msg u aft i come home tonight.
ham I had a good time too. Its nice to do something a bit different with my weekends for a change. See ya soon
ham Yo sorry was in the shower sup
ham Carlos is down but I have to pick it up from him, so I'll swing by usf in a little bit
ham Full heat pa:-) i have applyed oil pa.
ham I'm stuck in da middle of da row on da right hand side of da lt...
ham Have you laid your airtel line to rest?
ham Hi did u decide wot 2 get 4 his bday if not ill prob jus get him a voucher frm virgin or sumfing
spam FreeMsg: Txt: CALL to No: 86888 & claim your reward of 3 hours talk time to use from your phone now! Subscribe6GBP/mnth inc 3hrs 16 stop?txtStop
ham "Hey j! r u feeling any better, hopeSo hunny. i amnow feelin ill & ithink i may have tonsolitusaswell! damn iam layin in bedreal bored. lotsof luv me xxxx"
ham And I don't plan on staying the night but I prolly won't be back til late
ham THANX 4 PUTTIN DA FONE DOWN ON ME!!
ham I need an 8th but I'm off campus atm, could I pick up in an hour or two?
ham Oh... Haha... Den we shld had went today too... Gee, nvm la... Kaiez, i dun mind goin jazz oso... Scared hiphop open cant catch up...
ham Been running but only managed 5 minutes and then needed oxygen! Might have to resort to the roller option!
ham We live in the next <#> mins
ham Y de asking like this.
ham Just glad to be talking to you.
ham Wat time ü finish?
ham Sorry da. I gone mad so many pending works what to do.
ham How much you got for cleaning
ham hows my favourite person today? r u workin hard? couldn't sleep again last nite nearly rang u at 4.30
spam Sunshine Quiz! Win a super Sony DVD recorder if you canname the capital of Australia? Text MQUIZ to 82277. B
ham Ü called dad oredi...
ham Good. do you think you could send me some pix? I would love to see your top and bottom...
ham Nvm... I'm going to wear my sport shoes anyway... I'm going to be late leh.
ham Sorry, I'll call later In meeting.
ham THIS IS A LONG FUCKIN SHOWR
ham Received, understood n acted upon!
ham They finally came to fix the ceiling.
ham U need my presnts always bcz U cant mis love. "jeevithathile irulinae neekunna prakasamanu sneham" prakasam ennal prabha 'That mns prabha is'LOVE' Got it. Dont mis me....
ham Jus finish blowing my hair. U finish dinner already?
ham I'm on the bus. Love you
ham Lol ... I knew that .... I saw him in the dollar store
spam Please call our customer service representative on 0800 169 6031 between 10am-9pm as you have WON a guaranteed £1000 cash or £5000 prize!
spam Todays Voda numbers ending with 7634 are selected to receive a £350 reward. If you have a match please call 08712300220 quoting claim code 7684 standard rates apply.
ham Only saturday and sunday holiday so its very difficult:)
ham Everybody had fun this evening. Miss you.
ham Got hella gas money, want to go on a grand nature adventure with galileo in a little bit?
ham I'm in a meeting, call me later at
ham Oh wow thats gay. Will firmware update help
ham These won't do. Have to move on to morphine
ham How come i din c ü... Yup i cut my hair...
ham K k pa Had your lunch aha.
ham Oh ho. Is this the first time u use these type of words
ham Captain vijaykanth is doing comedy in captain tv..he is drunken :)
ham Of course. I guess god's just got me on hold right now.
ham Do you hide anythiing or keeping distance from me
ham Havent.
spam You are being ripped off! Get your mobile content from www.clubmoby.com call 08717509990 poly/true/Pix/Ringtones/Games six downloads for only 3
ham Sorry i din lock my keypad.
ham Did u got that persons story
ham Are you planning to come chennai?
spam We tried to contact you re your reply to our offer of a Video Phone 750 anytime any network mins Half Price Line Rental Camcorder Reply or call 08000930705
ham God created gap btwn ur fingers so dat sum1 vry special will fill those gaps by holding ur hands.. Now plz dont ask y he created so much gap between legs !!!
ham We are okay. Going to sleep now. Later
ham Please protect yourself from e-threats. SIB never asks for sensitive information like Passwords,ATM/SMS PIN thru email. Never share your password with anybody.
ham Finally it has happened..! Aftr decades..! BEER is now cheaper than PETROL! The goverment expects us to "DRINK". . . But don't "DRIVE "
spam A £400 XMAS REWARD IS WAITING FOR YOU! Our computer has randomly picked you from our loyal mobile customers to receive a £400 reward. Just call 09066380611
ham Where r e meeting tmr?
ham Lol yes. But it will add some spice to your day.
ham Hope you are having a great day.
ham Our Prasanth ettans mother passed away last night. Just pray for her and family.
ham K, I'll work something out
spam PRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S. I. M. points. Call 08718738002 Identifier Code: 48922 Expires 21/11/04
ham This message is from a great Doctor in India:-): 1) Do not drink APPY FIZZ. It contains Cancer causing age
ham I cant pick the phone right now. Pls send a message
ham You call him and tell now infront of them. Call him now.
ham Ok no prob...
ham Ladies first and genus second k .
ham No. Yes please. Been swimming?
ham Mum not going robinson already.
ham Ok set let u noe e details later...
ham Not..tel software name..
ham I send the print outs da.
ham IM REALY SOZ IMAT MY MUMS 2NITE WHAT ABOUT 2MORO
ham When I was born, GOD said, "Oh No! Another IDIOT". When you were born, GOD said, "OH No! COMPETITION". Who knew, one day these two will become FREINDS FOREVER!
ham I didnt get ur full msg..sometext is missing, send it again
ham Probably not, I'm almost out of gas and I get some cash tomorrow
spam Customer service announcement. We recently tried to make a delivery to you but were unable to do so, please call 07099833605 to re-schedule. Ref:9280114
ham I forgot 2 ask ü all smth.. There's a card on da present lei... How? Ü all want 2 write smth or sign on it?
ham I'm leaving my house now.
spam Hi babe its Chloe, how r u? I was smashed on saturday night, it was great! How was your weekend? U been missing me? SP visionsms.com Text stop to stop 150p/text
ham Ü ready then call me...
ham Wewa is 130. Iriver 255. All 128 mb.
ham It is a good thing I'm now getting the connection to bw
ham Sry da..jst nw only i came to home..
ham That's cool he'll be here all night, lemme know when you're around
ham Are you staying in town ?
ham Haha yeah, 2 oz is kind of a shitload
ham Ok u can take me shopping when u get paid =D
ham My life Means a lot to me, Not because I love my life, But because I love the people in my life, The world calls them friends, I call them my World:-).. Ge:-)..
ham Alright we'll bring it to you, see you in like <#> mins
ham But pls dont play in others life.
ham Eatin my lunch...
ham Hmmm.but you should give it on one day..
ham Didn't try, g and I decided not to head out
ham Ok no prob
ham Surly ill give it to you:-) while coming to review.
ham By march ending, i should be ready. But will call you for sure. The problem is that my capital never complete. How far with you. How's work and the ladies
ham Tessy..pls do me a favor. Pls convey my birthday wishes to Nimya..pls dnt forget it. Today is her birthday Shijas
ham Pls give her the food preferably pap very slowly with loads of sugar. You can take up to an hour to give it. And then some water. Very very slowly.
spam URGENT! Your Mobile No 07808726822 was awarded a £2,000 Bonus Caller Prize on 02/09/03! This is our 2nd attempt to contact YOU! Call 0871-872-9758 BOX95QU
ham A guy who gets used but is too dumb to realize it.
ham Okey dokey, i‘ll be over in a bit just sorting some stuff out.
ham Don no da:)whats you plan?
ham Yes fine
spam WIN: We have a winner! Mr. T. Foley won an iPod! More exciting prizes soon, so keep an eye on ur mobile or visit www.win-82050.co.uk
ham I liked the new mobile
ham Anytime...
ham Mmmmmmm *snuggles into you* ...*deep contented sigh* ... *whispers* ... I fucking love you so much I can barely stand it ...
ham Yar but they say got some error.
ham Hey anyway i have to :-)
ham Wow so healthy. Old airport rd lor. Cant thk of anything else. But i'll b bathing my dog later.
ham Wif my family booking tour package.
ham Did you say bold, then torch later. Or one torch and 2bold?
ham Haha awesome, I might need to take you up on that, what you doin tonight?
ham Ya i knw u vl giv..its ok thanks kano..anyway enjoy wit ur family wit 1st salary..:-);-)
ham Huh so slow i tot u reach long ago liao... U 2 more days only i 4 more leh...
ham Thats cool princess! I will cover your face in hot sticky cum :)
ham Big brother‘s really scraped the barrel with this shower of social misfits
ham Oops i thk i dun haf enuff... I go check then tell ü..
ham S:)8 min to go for lunch:)
ham Hey. What happened? U switch off ur cell d whole day. This isnt good. Now if u do care, give me a call tomorrow.
ham K will do, addie & I are doing some art so I'll be here when you get home
ham My uncles in Atlanta. Wish you guys a great semester.
ham Aiyo... Her lesson so early... I'm still sleepin, haha... Okie, u go home liao den confirm w me lor...
ham Forgot to tell ü smth.. Can ü like number the sections so that it's clearer..
ham Yup. Anything lor, if u dun wan it's ok...
ham I'm home, my love ... If your still awake ... *loving kiss*
ham HELLO PEACH! MY CAKE TASTS LUSH!
spam FREE GAME. Get Rayman Golf 4 FREE from the O2 Games Arcade. 1st get UR games settings. Reply POST, then save & activ8. Press 0 key for Arcade. Termsapply
ham There'll be a minor shindig at my place later tonight, you interested?
ham Jason says it's cool if we pick some up from his place in like an hour
spam Had your mobile 10 mths? Update to the latest Camera/Video phones for FREE. KEEP UR SAME NUMBER, Get extra free mins/texts. Text YES for a call
ham I (Career Tel) have added u as a contact on INDYAROCKS.COM to send FREE SMS. To remove from phonebook - sms NO to <#>
ham I've reached already.
ham I dont know ask to my brother. Nothing problem some thing that. Just i told .
ham K:)eng rocking in ashes:)
ham Wat time r ü going to xin's hostel?
ham Good Morning my Dear Shijutta........... Have a great & successful day.
spam Buy Space Invaders 4 a chance 2 win orig Arcade Game console. Press 0 for Games Arcade (std WAP charge) See o2.co.uk/games 4 Terms + settings. No purchase
ham Oh k:)after that placement there ah?
ham Not for possession, especially not first offense
ham Nt only for driving even for many reasons she is called BBD..thts it chikku, then hw abt dvg cold..heard tht vinobanagar violence hw is the condition..and hw ru ? Any problem?
ham I bought the test yesterday. Its something that lets you know the exact day u ovulate.when will get 2u in about 2 to 3wks. But pls pls dont fret. I know u r worried. Pls relax. Also is there anything in ur past history u need to tell me?
ham We have pizza if u want
ham I keep seeing weird shit and bein all "woah" then realising it's actually reasonable and I'm all "oh"
ham Many more happy returns of the day. I wish you happy birthday.
ham Ya very nice. . .be ready on thursday
ham I am in hospital da. . I will return home in evening
ham "Thinking of u ;) x"
spam Camera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days.
ham Orh i tot u say she now still dun believe.
ham When you just put in the + sign, choose my number and the pin will show. Right?
ham The beauty of life is in next second.. which hides thousands of secrets. I wish every second will be wonderful in ur life...!! gud n8
ham Thanx u darlin!im cool thanx. A few bday drinks 2 nite. 2morrow off! Take care c u soon.xxx
ham If you're still up, maybe leave the credit card so I can get gas when I get back like he told me to
spam Your weekly Cool-Mob tones are ready to download !This weeks new Tones include: 1) Crazy Frog-AXEL F>>> 2) Akon-Lonely>>> 3) Black Eyed-Dont P >>>More info in n
ham Well boy am I glad G wasted all night at applebees for nothing
spam Cashbin.co.uk (Get lots of cash this weekend!) www.cashbin.co.uk Dear Welcome to the weekend We have got our biggest and best EVER cash give away!! These..
ham Ok lor... Or u wan me go look 4 u?
ham U wan 2 haf lunch i'm in da canteen now.
ham Don't make life too stressfull.. Always find time to Laugh.. It may not add years to your Life! But surely adds more life to ur years!! Gud ni8..swt dreams..
ham hey, looks like I was wrong and one of the kappa guys numbers is still on my phone, if you want I can text him and see if he's around
spam URGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09061790121 from land line. Claim 3030. Valid 12hrs only 150ppm
spam Thanks 4 your continued support Your question this week will enter u in2 our draw 4 £100 cash. Name the NEW US President? txt ans to 80082
ham I'm home. Doc gave me pain meds says everything is fine.
ham It's é only $140 ard...É rest all ard $180 at least...Which is é price 4 é 2 bedrm ($900)
ham Me too! Have a lovely night xxx
ham Prepare to be pleasured :)
ham Hi.:)technical support.providing assistance to us customer through call and email:)
ham if you text on your way to cup stop that should work. And that should be BUS
ham Whens your radio show?
spam Your unique user ID is 1172. For removal send STOP to 87239 customer services 08708034412
ham I'm not sure if its still available though
ham watever reLation u built up in dis world only thing which remains atlast iz lonlines with lotz n lot memories! feeling..
ham CHEERS LOU! YEAH WAS A GOODNITE SHAME U NEVA CAME! C YA GAILxx
ham Hi..i got the money da:)
ham Hi, Mobile no. <#> has added you in their contact list on www.fullonsms.com It s a great place to send free sms to people For more visit fullonsms.com
ham Ok then u tell me wat time u coming later lor.
ham U repeat e instructions again. Wat's e road name of ur house?
ham So many people seems to be special at first sight, But only very few will remain special to you till your last sight.. Maintain them till life ends.. Sh!jas
ham Quite lor. But dun tell him wait he get complacent...
ham Sorry completely forgot * will pop em round this week if your still here?
ham U R THE MOST BEAUTIFUL GIRL IVE EVER SEEN. U R MY BABY COME AND C ME IN THE COMMON ROOM
ham O we cant see if we can join denis and mina? Or does denis want alone time
ham Sen told that he is going to join his uncle finance in cbe
ham Yup... Hey then one day on fri we can ask miwa and jiayin take leave go karaoke
ham Call me, i am senthil from hsbc.
ham Especially since i talk about boston all up in my personal statement, lol! I woulda changed that if i had realized it said nyc! It says boston now.
ham Indeed and by the way it was either or - not both !
spam Urgent -call 09066649731from Landline. Your complimentary 4* Ibiza Holiday or £10,000 cash await collection SAE T&Cs PO BOX 434 SK3 8WP 150ppm 18+
ham Holy living christ what is taking you so long
ham Ü thk of wat to eat tonight.
ham Thanx. Yup we coming back on sun. Finish dinner going back 2 hotel now. Time flies, we're tog 4 exactly a mth today. Hope we'll haf many more mths to come...
ham We're on the opposite side from where we dropped you off
ham Yup. Izzit still raining heavily cos i'm in e mrt i can't c outside.
ham Send me your resume:-)
ham Gd luck 4 ur exams :-)
ham Or u ask they all if next sat can a not. If all of them can make it then i'm ok lor.
ham Sorry that was my uncle. I.ll keep in touch
ham Saw Guys and Dolls last night with Patrick Swayze it was great
spam URGENT This is our 2nd attempt to contact U. Your £900 prize from YESTERDAY is still awaiting collection. To claim CALL NOW 09061702893
spam Santa calling! Would your little ones like a call from Santa Xmas Eve? Call 09077818151 to book you time. Calls1.50ppm last 3mins 30s T&C www.santacalling.com
ham Just come home. I don't want u to be miserable
ham I dont know why she.s not getting your messages
ham its cool but tyler had to take off so we're gonna buy for him and drop it off at his place later tonight. Our total order is a quarter, you got enough?
ham The guy at the car shop who was flirting with me got my phone number from the paperwork and called and texted me. I'm nervous because of course now he may have my address. Should i call his boss and tell him, knowing this may get him fired?
ham Reverse is cheating. That is not mathematics.
ham How do you plan to manage that
ham Er, hello, things didn‘t quite go to plan – is limping slowly home followed by aa and with exhaust hanging off
ham Sorry for the delay. Yes masters
ham Call me when u finish then i come n pick u.
spam PRIVATE! Your 2004 Account Statement for 078498****7 shows 786 unredeemed Bonus Points. To claim call 08719180219 Identifier Code: 45239 Expires 06.05.05
ham What's up my own oga. Left my phone at home and just saw ur messages. Hope you are good. Have a great weekend.
ham Don't worry though, I understand how important it is that I be put in my place with a poorly thought out punishment in the face of the worst thing that has ever happened to me. Brb gonna go kill myself
ham Honey, can you pls find out how much they sell Predicte in Nigeria. And how many times can it be used. Its very important to have a reply before monday
ham E admin building there? I might b slightly earlier... I'll call u when i'm reaching...
ham fyi I'm at usf now, swing by the room whenever
ham i can call in <#> min if thats ok
ham Ummmmmaah Many many happy returns of d day my dear sweet heart.. HAPPY BIRTHDAY dear
ham Ü no home work to do meh...
ham Anything is valuable in only 2 situations: First- Before getting it... Second- After loosing it...
ham Me too. Mark is taking forever to pick up my prescription and the pain is coming back.
ham How's ur paper?
ham Got smaller capacity one? Quite ex...
spam Check Out Choose Your Babe Videos @ sms.shsex.netUN fgkslpoPW fgkslpo
ham Im good! I have been thinking about you...
spam u r a winner U ave been specially selected 2 receive £1000 cash or a 4* holiday (flights inc) speak to a live operator 2 claim 0871277810710p/min (18 )
ham :-) :-)
ham Not thought bout it... || Drink in tap & spile at seven. || Is that pub on gas st off broad st by canal. || Ok?
ham I am going to sleep. I am tired of travel.
ham Haha, just what I was thinkin
ham Yup but it's not giving me problems now so mayb i'll jus leave it...
ham Lol no. Just trying to make your day a little more interesting
ham How long before you get reply, just defer admission til next semester
ham The word "Checkmate" in chess comes from the Persian phrase "Shah Maat" which means; "the king is dead.." Goodmorning.. Have a good day..:)
ham Po de :-):):-):-):-). No need job aha.
ham Rats. Hey did u ever vote for the next themes?
spam New Mobiles from 2004, MUST GO! Txt: NOKIA to No: 89545 & collect yours today! From ONLY £1. www.4-tc.biz 2optout 087187262701.50gbp/mtmsg18 TXTAUCTION.
ham I hope your pee burns tonite.
ham OH RITE. WELL IM WITH MY BEST MATE PETE, WHO I WENT OUT WITH 4 A WEEK+ NOW WERE 2GEVA AGAIN. ITS BEEN LONGER THAN A WEEK.
ham Yay can't wait to party together!
ham ....photoshop makes my computer shut down.
ham All boys made fun of me today. Ok i have no problem. I just sent one message just for fun
ham That's one of the issues but california is okay. No snow so its manageable
spam PRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S. I. M. points. Call 08715203652 Identifier Code: 42810 Expires 29/10/0
ham Hmmm.... Mayb can try e shoppin area one, but forgot e name of hotel...
ham Awesome, that gonna be soon or later tonight?
ham I need details about that online job.
spam YOU HAVE WON! As a valued Vodafone customer our computer has picked YOU to win a £150 prize. To collect is easy. Just call 09061743386
ham Missing you too.pray inshah allah
ham Pls help me tell Ashley that i cant find her number oh
ham I am in escape theatre now. . Going to watch KAVALAN in a few minutes
ham S.this will increase the chance of winning.
ham either way works for me. I am <#> years old. Hope that doesnt bother you.
ham Maybe you should find something else to do instead???
ham Gain the rights of a wife.dont demand it.i am trying as husband too.Lets see
ham I liked your new house
ham I'm fine. Hope you are also
ham Also north carolina and texas atm, you would just go to the gre site and pay for the test results to be sent.
ham Same to u...
ham yes baby! I need to stretch open your pussy!
ham Thanks and ! Or bomb and date as my phone wanted to say!
ham Ok...
ham Hey, a guy I know is breathing down my neck to get him some bud, anyway you'd be able to get a half track to usf tonight?
ham "Response" is one of d powerful weapon 2 occupy a place in others 'HEART'... So, always give response 2 who cares 4 U"... Gud night..swt dreams..take care
ham Nokia phone is lovly..
spam **FREE MESSAGE**Thanks for using the Auction Subscription Service. 18 . 150p/MSGRCVD 2 Skip an Auction txt OUT. 2 Unsubscribe txt STOP CustomerCare 08718726270
spam Bored housewives! Chat n date now! 0871750.77.11! BT-national rate 10p/min only from landlines!
ham Sorry da..today i wont come to play..i have driving clas..
ham I'm really sorry I lit your hair on fire
ham Oh! Shit, I thought that was your trip! Loooooool ... That just makes SO much more sense now ... *grins* and the sofa reference was ... The "sleep on a couch" link you sent me ... Wasn't that how you went on your trip ? Oh ... And didn't your babe go with you for that celebration with your rents?
ham Okey dokey swashbuckling stuff what oh.
ham Watching cartoon, listening music & at eve had to go temple & church.. What about u?
ham 1. Tension face 2. Smiling face 3. Waste face 4. Innocent face 5.Terror face 6.Cruel face 7.Romantic face 8.Lovable face 9.decent face <#> .joker face.
ham Dip's cell dead. So i m coming with him. U better respond else we shall come back.
ham Well. You know what i mean. Texting
ham Hi dis is yijue i would be happy to work wif ü all for gek1510...
ham Lol! Oops sorry! Have fun.
ham Wat happened to the cruise thing
ham I know dat feelin had it with Pete! Wuld get with em , nuther place nuther time mayb?
spam lyricalladie(21/F) is inviting you to be her friend. Reply YES-910 or NO-910. See her: www.SMS.ac/u/hmmross STOP? Send STOP FRND to 62468
ham The world's most happiest frnds never have the same characters... Dey just have the best understanding of their differences...
spam No 1 POLYPHONIC tone 4 ur mob every week! Just txt PT2 to 87575. 1st Tone FREE ! so get txtin now and tell ur friends. 150p/tone. 16 reply HL 4info
ham Yeah just open chat and click friend lists. Then make the list. Easy as pie
ham alright tyler's got a minor crisis and has to be home sooner than he thought so be here asap
ham When/where do I pick you up
ham As usual u can call me ard 10 smth.
ham New Theory: Argument wins d SITUATION, but loses the PERSON. So dont argue with ur friends just.. . . . kick them & say, I'm always correct.!
ham For many things its an antibiotic and it can be used for chest abdomen and gynae infections even bone infections.
ham Poor girl can't go one day lmao
ham Or just do that 6times
spam Todays Vodafone numbers ending with 4882 are selected to a receive a £350 award. If your number matches call 09064019014 to receive your £350 award.
ham You have to pls make a note of all she.s exposed to. Also find out from her school if anyone else was vomiting. Is there a dog or cat in the house? Let me know later.
ham Japanese Proverb: If one Can do it, U too Can do it, If none Can do it,U must do it Indian version: If one Can do it, LET HIM DO it.. If none Can do it,LEAVE it!! And finally Kerala version: If one can do it, Stop him doing it.. If none can do it, Make a strike against it ...
ham Sounds like there could be a lot of time spent in that chastity device boy ... *grins* ... Or take your beatings like a good dog. Going to lounge in a nice long bath now ?
ham Its worse if if uses half way then stops. Its better for him to complete it.
ham Miserable. They don't tell u that the side effects of birth control are massive gut wrenching cramps for the first 2 months. I didn't sleep at all last night.
ham Send me the new number
ham Convey my regards to him
spam Want the latest Video handset? 750 anytime any network mins? Half price line rental? Reply or call 08000930705 for delivery tomorrow
ham 2 and half years i missed your friendship:-)
ham I cant pick the phone right now. Pls send a message
ham Oh for fuck's sake she's in like tallahassee
ham Haha, that was the first person I was gonna ask
spam ou are guaranteed the latest Nokia Phone, a 40GB iPod MP3 player or a £500 prize! Txt word: COLLECT to No: 83355! IBHltd LdnW15H 150p/Mtmsgrcvd18
ham Taka lor. Wat time u wan 2 come n look 4 us?
spam * FREE* POLYPHONIC RINGTONE Text SUPER to 87131 to get your FREE POLY TONE of the week now! 16 SN PoBox202 NR31 7ZS subscription 450pw
ham "I;m reaching in another 2 stops."
ham no, i *didn't* mean to post it. I wrote it, and like so many other times i've ritten stuff to you, i let it sit there. it WAS what i was feeling at the time. I was angry. Before i left, i hit send, then stop. It wasn't there. I checked on my phone when i got to my car. It wasn't there. You said you didn't sleep, you were bored. So why wouldn't THAT be the time to clean, fold laundry, etc.? At least make the bed?
spam Warner Village 83118 C Colin Farrell in SWAT this wkend @Warner Village & get 1 free med. Popcorn!Just show [email protected] 4-7/12. C t&c @kiosk. Reply SONY 4 mre film offers
ham Will you come online today night
ham Then anything special?
ham I'm in solihull, | do you want anything?
ham Will do. Have a good day
ham WE REGRET TO INFORM U THAT THE NHS HAS MADE A MISTAKE.U WERE NEVER ACTUALLY BORN.PLEASE REPORT 2 YOR LOCAL HOSPITAL 2B TERMINATED.WE R SORRY 4 THE INCONVENIENCE
ham Love that holiday Monday feeling even if I have to go to the dentists in an hour
ham I am on the way to tirupur.
spam Goal! Arsenal 4 (Henry, 7 v Liverpool 2 Henry scores with a simple shot from 6 yards from a pass by Bergkamp to give Arsenal a 2 goal margin after 78 mins.
ham You've already got a flaky parent. It'snot supposed to be the child's job to support the parent...not until they're The Ride age anyway. I'm supposed to be there to support you. And now i've hurt you. unintentional. But hurt nonetheless.
ham We took hooch for a walk toaday and i fell over! Splat! Grazed my knees and everything! Should have stayed at home! See you tomorrow!
ham Just dropped em off, omw back now
spam This is the 2nd time we have tried 2 contact u. U have won the 750 Pound prize. 2 claim is easy, call 08712101358 NOW! Only 10p per min. BT-national-rate
ham Sitting in mu waiting for everyone to get out of my suite so I can take a shower
ham Re your call; You didn't see my facebook huh?
ham G says you never answer your texts, confirm/deny
ham Its so common hearin How r u? Wat r u doing? How was ur day? So let me ask u something different. Did u smile today? If not, do it now.... Gud evng.
ham Hi Dear Call me its urgnt. I don't know whats your problem. You don't want to work or if you have any other problem at least tell me. Wating for your reply.
ham Oh yah... We never cancel leh... Haha
ham We can go 4 e normal pilates after our intro...
ham Ok... Let u noe when i leave my house.
ham Oh yes, why is it like torture watching england?
ham Wanna do some art?! :D
ham Just hopeing that wasn‘t too pissed up to remember and has gone off to his sisters or something!
spam Got what it takes 2 take part in the WRC Rally in Oz? U can with Lucozade Energy! Text RALLY LE to 61200 (25p), see packs or lucozade.co.uk/wrc & itcould be u!
spam Hi, the SEXYCHAT girls are waiting for you to text them. Text now for a great night chatting. send STOP to stop this service
ham Good morning, my boytoy! How's those yummy lips ? Where's my sexy buns now ? What do you do ? Do you think of me ? Do you crave me ? Do you need me ?
ham Match started.india <#> for 2
ham Once free call me sir.
ham Hey do you want anything to buy:)
ham Hey babe, how's it going ? Did you ever figure out where your going for New Years ?
ham K..k.:)congratulation ..
ham G wants to know where the fuck you are
ham No it was cancelled yeah baby! Well that sounds important so i understand my darlin give me a ring later on this fone love Kate x
ham Tomarrow i want to got to court. At <DECIMAL> . So you come to bus stand at 9.
ham Ü go home liao? Ask dad to pick me up at 6...
ham Omg you can make a wedding chapel in frontierville? Why do they get all the good stuff?
ham I'm eatin now lor, but goin back to work soon... E mountain deer show huh... I watch b4 liao, very nice...
ham Check mail.i have mailed varma and kept copy to you regarding membership.take care.insha allah.
ham Wrong phone! This phone! I answer this one but assume the other is people i don't well
ham Anyway I don't think I can secure anything up here, lemme know if you want me to drive down south and chill
ham I'm already back home so no probably not
spam Great News! Call FREEFONE 08006344447 to claim your guaranteed £1000 CASH or £2000 gift. Speak to a live operator NOW!
spam Hi this is Amy, we will be sending you a free phone number in a couple of days, which will give you an access to all the adult parties...
ham I am in bus on the way to calicut
ham Hi its me you are probably having too much fun to get this message but i thought id txt u cos im bored! and james has been farting at me all night
ham hi baby im sat on the bloody bus at the mo and i wont be home until about 7:30 wanna do somethin later? call me later ortxt back jess xx
spam Welcome to Select, an O2 service with added benefits. You can now call our specially trained advisors FREE from your mobile by dialling 402.
ham I lost 4 pounds since my doc visit last week woot woot! Now I'm gonna celebrate by stuffing my face!
ham U coming back 4 dinner rite? Dad ask me so i re confirm wif u...
ham Doing my masters. When will you buy a bb cos i have for sale and how's bf
ham Ahhhh...just woken up!had a bad dream about u tho,so i dont like u right now :) i didnt know anything about comedy night but i guess im up for it.
ham I'm vivek:)i got call from your number.
ham Why didn't u call on your lunch?
ham What i mean was i left too early to check, cos i'm working a 9-6.
ham I want <#> rs da:)do you have it?
ham A bit of Ur smile is my hppnss, a drop of Ur tear is my sorrow, a part of Ur heart is my life, a heart like mine wil care for U, forevr as my GOODFRIEND
ham Yup ok...
ham I want to see your pretty pussy...
spam Dear Voucher holder Have your next meal on us. Use the following link on your pc 2 enjoy a 2 4 1 dining experiencehttp://www.vouch4me.com/etlp/dining.asp
ham A few people are at the game, I'm at the mall with iouri and kaila
spam URGENT! We are trying to contact U. Todays draw shows that you have won a £2000 prize GUARANTEED. Call 09058094507 from land line. Claim 3030. Valid 12hrs only
spam You can donate £2.50 to UNICEF's Asian Tsunami disaster support fund by texting DONATE to 864233. £2.50 will be added to your next bill
ham Future is not what we planned for tomorrow.....! it is the result of what we do today...! Do the best in present... enjoy the future.
ham I will cme i want to go to hos 2morow. After that i wil cme. This what i got from her dear what to do. She didnt say any time
ham We are supposed to meet to discuss abt our trip... Thought xuhui told you? In the afternoon. Thought we can go for lesson after that
ham Hey come online! Use msn... We are all there
ham I'm fine. Hope you are good. Do take care.
ham Oops I was in the shower when u called. Hey a parking garage collapsed at university hospital. See I'm not crazy. Stuff like that DOES happen.
ham Aiyo u so poor thing... Then u dun wan 2 eat? U bathe already?
ham Yar... I tot u knew dis would happen long ago already.
ham You are gorgeous! keep those pix cumming :) thank you!
ham A boy was late 2 home. His father: "POWER OF FRNDSHIP"
ham JADE ITS PAUL. Y DIDNT U TXT ME? DO U REMEMBER ME FROM BARMED? I WANT 2 TALK 2 U! TXT ME
ham Spending new years with my brother and his family. Lets plan to meet next week. Are you ready to be spoiled? :)
ham So what u doing today?
ham I said its okay. Sorry
ham Slept? I thinkThis time ( <#> pm) is not dangerous
ham Networking job is there.
spam goldviking (29/M) is inviting you to be his friend. Reply YES-762 or NO-762 See him: www.SMS.ac/u/goldviking STOP? Send STOP FRND to 62468
ham Dont let studying stress you out. L8r.
ham That's y u haf 2 keep me busy...
ham No rushing. I'm not working. I'm in school so if we rush we go hungry.
ham Which channel:-):-):):-).
ham So your telling me I coulda been your real Valentine and I wasn't? U never pick me for NOTHING!!
spam Phony £350 award - Todays Voda numbers ending XXXX are selected to receive a £350 award. If you have a match please call 08712300220 quoting claim code 3100 standard rates app
ham We made it! Eta at taunton is 12:30 as planned, hope that‘s still okday?! Good to see you! :-xx
ham I'm hungry buy smth home...
ham "HEY KATE, HOPE UR OK... WILL GIVE U A BUZ WEDLUNCH. GO OUTSOMEWHERE 4 ADRINK IN TOWN..CUD GO 2WATERSHD 4 A BIT? PPL FROMWRK WILL BTHERE. LOVE PETEXXX."
ham My drive can only be read. I need to write
ham Just looked it up and addie goes back Monday, sucks to be her
ham Happy new year. Hope you are having a good semester
ham Esplanade lor. Where else...
ham Can you talk with me..
ham Hmph. Go head, big baller.
ham Well its not like you actually called someone a punto. That woulda been worse.
ham Nope. Since ayo travelled, he has forgotten his guy
ham You still around? Looking to pick up later
spam CDs 4u: Congratulations ur awarded £500 of CD gift vouchers or £125 gift guaranteed & Freeentry 2 £100 wkly draw xt MUSIC to 87066 TnCs www.ldew.com1win150ppmx3age16
ham There's someone here that has a year <#> toyota camry like mr olayiwola's own. Mileage is <#> k.its clean but i need to know how much will it sell for. If i can raise the dough for it how soon after landing will it sell. Holla back.
ham Guess which pub im in? Im as happy as a pig in clover or whatever the saying is!
ham ILL B DOWN SOON
ham Oh k. . I will come tomorrow
ham Go fool dont cheat others ok
ham My mobile number.pls sms ur mail id.convey regards to achan,amma.Rakhesh.Qatar
ham By the way, 'rencontre' is to meet again. Mountains dont....
spam You have WON a guaranteed £1000 cash or a £2000 prize. To claim yr prize call our customer service representative on 08714712412 between 10am-7pm Cost 10p
ham U attend ur driving lesson how many times a wk n which day?
ham Uncle G, just checking up on you. Do have a rewarding month
ham Hello boytoy ! Geeee ... I'm missing you today. I like to send you a tm and remind you I'm thinking of you ... And you are loved ... *loving kiss*
ham I think the other two still need to get cash but we can def be ready by 9
ham Hey gals...U all wanna meet 4 dinner at nìte?
spam Dear 0776xxxxxxx U've been invited to XCHAT. This is our final attempt to contact u! Txt CHAT to 86688 150p/MsgrcvdHG/Suite342/2Lands/Row/W1J6HL LDN 18yrs
ham Babe ! What are you doing ? Where are you ? Who are you talking to ? Do you think of me ? Are you being a good boy? Are you missing me? Do you love me ?
ham Great! How is the office today?
ham It's cool, we can last a little while. Getting more any time soon?
ham :-( sad puppy noise
ham Yes its possible but dint try. Pls dont tell to any one k
ham Anyway holla at me whenever you're around because I need an excuse to go creep on people in sarasota
ham Where you. What happen
ham I was gonna ask you lol but i think its at 7
spam Ur cash-balance is currently 500 pounds - to maximize ur cash-in now send GO to 86688 only 150p/meg. CC: 08718720201 HG/Suite342/2lands Row/W1j6HL
spam PRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S.I.M. points. Call 08715203685 Identifier Code:4xx26 Expires 13/10/04
ham Go chase after her and run her over while she's crossing the street
spam I'd like to tell you my deepest darkest fantasies. Call me 09094646631 just 60p/min. To stop texts call 08712460324 (nat rate)
ham Is there coming friday is leave for pongal?do you get any news from your work place.
ham Hey... Very inconvenient for your sis a not huh?
ham Ok i vl..do u know i got adsense approved..
ham * Was really good to see you the other day dudette, been missing you!
ham I want to go to perumbavoor
ham How many times i told in the stage all use to laugh. You not listen aha.
spam You won't believe it but it's true. It's Incredible Txts! Reply G now to learn truly amazing things that will blow your mind. From O2FWD only 18p/txt
ham (You didn't hear it from me)
ham Thanks for being there for me just to talk to on saturday. You are very dear to me. I cherish having you as a brother and role model.
ham Pls clarify back if an open return ticket that i have can be preponed for me to go back to kerala.
spam Natalie (20/F) is inviting you to be her friend. Reply YES-165 or NO-165 See her: www.SMS.ac/u/natalie2k9 STOP? Send STOP FRND to 62468
ham She ran off with a younger man. we will make pretty babies together :)
spam Jamster! To get your free wallpaper text HEART to 88888 now! T&C apply. 16 only. Need Help? Call 08701213186.
ham O ic lol. Should play 9 doors sometime yo
ham Dunno, my dad said he coming home 2 bring us out 4 lunch. Yup i go w u lor. I call u when i reach school lor...
ham We have sent JD for Customer Service cum Accounts Executive to ur mail id, For details contact us
ham Desires- u going to doctor 4 liver. And get a bit stylish. Get ur hair managed. Thats it.
ham Hmmm.still we dont have opener?
ham Yeah so basically any time next week you can get away from your mom & get up before 3
ham Edison has rightly said, "A fool can ask more questions than a wise man can answer" Now you know why all of us are speechless during ViVa.. GM,GN,GE,GNT:-)
ham I will vote for wherever my heart guides me
ham With my sis lor... We juz watched italian job.
ham Tick, tick, tick .... Where are you ? I could die of loneliness you know ! *pouts* *stomps feet* I need you ...
ham Lmao you know me so well...
spam Double Mins & Double Txt & 1/2 price Linerental on Latest Orange Bluetooth mobiles. Call MobileUpd8 for the very latest offers. 08000839402 or call2optout/LF56
ham Am on a train back from northampton so i'm afraid not! I'm staying skyving off today ho ho! Will be around wednesday though. Do you fancy the comedy club this week by the way?
ham Goodnight da thangam I really miss u dear.
ham Hey next sun 1030 there's a basic yoga course... at bugis... We can go for that... Pilates intro next sat.... Tell me what time you r free
ham Geeeee ... Your internet is really bad today, eh ?
spam Free video camera phones with Half Price line rental for 12 mths and 500 cross ntwk mins 100 txts. Call MobileUpd8 08001950382 or Call2OptOut/674
ham I think i am disturbing her da
ham Sorry, I'll call you later. I am in meeting sir.
ham Havent stuck at orchard in my dad's car. Going 4 dinner now. U leh? So r they free tonight?
ham Ok i also wan 2 watch e 9 pm show...
ham I dunno lei... Like dun haf...
ham But your brother transfered only <#> + <#> . Pa.
ham I calls you later. Afternoon onwords mtnl service get problem in south mumbai. I can hear you but you cann't listen me.
spam 83039 62735=£450 UK Break AccommodationVouchers terms & conditions apply. 2 claim you mustprovide your claim number which is 15541
ham Talk to g and x about that
ham Hai dear friends... This is my new & present number..:) By Rajitha Raj (Ranju)
spam 5p 4 alfie Moon's Children in need song on ur mob. Tell ur m8s. Txt Tone charity to 8007 for Nokias or Poly charity for polys: zed 08701417012 profit 2 charity.
ham As in different styles?
spam WIN a £200 Shopping spree every WEEK Starting NOW. 2 play text STORE to 88039. SkilGme. TsCs08714740323 1Winawk! age16 £1.50perweeksub.
ham Gud ni8 dear..slp well..take care..swt dreams..Muah..
ham I want to sent <#> mesages today. Thats y. Sorry if i hurts
spam This is the 2nd attempt to contract U, you have won this weeks top prize of either £1000 cash or £200 prize. Just call 09066361921
ham Well, i'm glad you didn't find it totally disagreeable ... Lol
ham Guy, no flash me now. If you go call me, call me. How madam. Take care oh.
spam Do you want a New Nokia 3510i colour phone DeliveredTomorrow? With 300 free minutes to any mobile + 100 free texts + Free Camcorder reply or call 08000930705.
ham Mark works tomorrow. He gets out at 5. His work is by your house so he can meet u afterwards.
ham "Keep ur problems in ur heart, b'coz nobody will fight for u. Only u & u have to fight for ur self & win the battle. -VIVEKANAND- G 9t.. SD..
ham Yeah, give me a call if you've got a minute
ham "HI BABE UAWAKE?FEELLIKW SHIT.JUSTFOUND OUT VIA ALETTER THATMUM GOTMARRIED 4thNOV.BEHIND OURBACKS FUCKINNICE!SELFISH,DEVIOUSBITCH.ANYWAY,IL CALL U"
ham Amazing : If you rearrange these letters it gives the same meaning... Dormitory = Dirty room Astronomer = Moon starer The eyes = They see Election results = Lies lets recount Mother-in-law = Woman Hitler Eleven plus two =Twelve plus one Its Amazing... !:-)
ham Aiya we discuss later lar... Pick ü up at 4 is it?
ham Hey happy birthday...
ham Sorry i missed your call. Can you please call back.
ham Omg if its not one thing its another. My cat has worms :/ when does this bad day end?
ham Good morning, im suffering from fever and dysentry ..will not be able to come to office today.
ham I wont do anything de.
ham What type of stuff do you sing?
ham St andre, virgil's cream
ham No no. I will check all rooms befor activities
ham My fri ah... Okie lor,goin 4 my drivin den go shoppin after tt...
ham Gokila is talking with you aha:)
ham Hi Shanil,Rakhesh here.thanks,i have exchanged the uncut diamond stuff.leaving back. Excellent service by Dino and Prem.
ham K.k.this month kotees birthday know?
ham But i'm really really broke oh. No amount is too small even <#>
ham Sorry about that this is my mates phone and i didnt write it love Kate
spam TheMob>Hit the link to get a premium Pink Panther game, the new no. 1 from Sugababes, a crazy Zebra animation or a badass Hoody wallpaper-all 4 FREE!
ham Ah, well that confuses things, doesnt it? I thought was friends with now. Maybe i did the wrong thing but i already sort of invited -tho he may not come cos of money.
ham Aight, call me once you're close
ham Nope thats fine. I might have a nap tho!
spam This msg is for your mobile content order It has been resent as previous attempt failed due to network error Queries to [email protected]
ham In other news after hassling me to get him weed for a week andres has no money. HAUGHAIGHGTUJHYGUJ
ham A Boy loved a gal. He propsd bt she didnt mind. He gv lv lttrs, Bt her frnds threw thm. Again d boy decided 2 aproach d gal , dt time a truck was speeding towards d gal. Wn it was about 2 hit d girl,d boy ran like hell n saved her. She asked 'hw cn u run so fast?' D boy replied "Boost is d secret of my energy" n instantly d girl shouted "our energy" n Thy lived happily 2gthr drinking boost evrydy Moral of d story:- I hv free msgs:D;): gud ni8
ham I wnt to buy a BMW car urgently..its vry urgent.but hv a shortage of <#> Lacs.there is no source to arng dis amt. <#> lacs..thats my prob
ham Ding me on ya break fassyole! Blacko from londn
ham I REALLY NEED 2 KISS U I MISS U MY BABY FROM UR BABY 4EVA
ham The sign of maturity is not when we start saying big things.. But actually it is, when we start understanding small things... *HAVE A NICE EVENING* BSLVYL
ham Oh you got many responsibilities.
spam You have 1 new message. Please call 08715205273
ham I've reached sch already...
spam December only! Had your mobile 11mths+? You are entitled to update to the latest colour camera mobile for Free! Call The Mobile Update VCo FREE on 08002986906
ham U definitely need a module from e humanities dis sem izzit? U wan 2 take other modules 1st?
ham Argh why the fuck is nobody in town ;_;
spam Get 3 Lions England tone, reply lionm 4 mono or lionp 4 poly. 4 more go 2 www.ringtones.co.uk, the original n best. Tones 3GBP network operator rates apply.
ham Thanks. Fills me with complete calm and reassurance!
ham Aslamalaikkum....insha allah tohar beeen muht albi mufti mahfuuz...meaning same here....
ham Are you driving or training?
ham Lol for real. She told my dad I have cancer
spam PRIVATE! Your 2003 Account Statement for 078
ham Oops I did have it, <#> ?
ham "NOT ENUFCREDEIT TOCALL.SHALL ILEAVE UNI AT 6 +GET A BUS TO YOR HOUSE?"
ham Hi Chikku, send some nice msgs
ham He is impossible to argue with and he always treats me like his sub, like he never released me ... Which he did and I will remind him of that if necessary
ham After my work ah... Den 6 plus lor... U workin oso rite... Den go orchard lor, no other place to go liao...
ham To the wonderful Okors, have a great month. We cherish you guys and wish you well each day. MojiBiola
ham Cuz ibored. And don wanna study
ham Wot about on wed nite I am 3 then but only til 9!
ham Rose for red,red for blood,blood for heart,heart for u. But u for me.... Send tis to all ur friends.. Including me.. If u like me.. If u get back, 1-u r poor in relation! 2-u need some 1 to support 3-u r frnd 2 many 4-some1 luvs u 5+- some1 is praying god to marry u.:-) try it....
ham Any way where are you and what doing.
ham That sucks. I'll go over so u can do my hair. You'll do it free right?
ham it's still not working. And this time i also tried adding zeros. That was the savings. The checking is <#>
ham Hmm... Dunno leh, mayb a bag 4 goigng out dat is not too small. Or jus anything except perfume, smth dat i can keep.
ham Sday only joined.so training we started today:)
ham Sorry * was at the grocers.
ham There are some nice pubs near here or there is Frankie n Bennys near the warner cinema?
spam YOU VE WON! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TC s, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins
ham Yup... I havent been there before... You want to go for the yoga? I can call up to book
ham Oh shut it. Omg yesterday I had a dream that I had 2 kids both boys. I was so pissed. Not only about the kids but them being boys. I even told mark in my dream that he was changing diapers cause I'm not getting owed in the face.
ham Yeah I imagine he would be really gentle. Unlike the other docs who treat their patients like turkeys.
spam FREE for 1st week! No1 Nokia tone 4 ur mobile every week just txt NOKIA to 8077 Get txting and tell ur mates. www.getzed.co.uk POBox 36504 W45WQ 16+ norm150p/tone
ham Now that you have started dont stop. Just pray for more good ideas and anything i see that can help you guys i.ll forward you a link.
ham Hi darlin im on helens fone im gonna b up the princes 2 nite please come up tb love Kate
ham I'm in office now da:)where are you?
ham Aiyar u so poor thing... I give u my support k... Jia you! I'll think of u...
ham Oh unintentionally not bad timing. Great. Fingers the trains play along! Will give fifteen min warning.
spam Get your garden ready for summer with a FREE selection of summer bulbs and seeds worth £33:50 only with The Scotsman this Saturday. To stop go2 notxt.co.uk
ham K..then come wenever u lik to come and also tel vikky to come by getting free time..:-)
ham Pls call me da. What happen.
ham Happy new year to u and ur family...may this new year bring happiness , stability and tranquility to ur vibrant colourful life:):)
ham No problem with the renewal. I.ll do it right away but i dont know his details.
ham Idk. I'm sitting here in a stop and shop parking lot right now bawling my eyes out because i feel like i'm a failure in everything. Nobody wants me and now i feel like i'm failing you.
ham Haven't left yet so probably gonna be here til dinner
ham Like <#> , same question
ham MY NEW YEARS EVE WAS OK. I WENT TO A PARTY WITH MY BOYFRIEND. WHO IS THIS SI THEN HEY
ham Sir, I need Velusamy sir's date of birth and company bank facilities details.
ham K k:) sms chat with me.
ham I will come with karnan car. Please wait till 6pm will directly goto doctor.
ham No but the bluray player can
ham Ok... Then r we meeting later?
ham Lol no. I just need to cash in my nitros. Hurry come on before I crash out!
ham Just send a text. We'll skype later.
ham Ok leave no need to ask
spam Congrats 2 mobile 3G Videophones R yours. call 09063458130 now! videochat wid ur mates, play java games, Dload polypH music, noline rentl. bx420. ip4. 5we. 150p
ham Ü still got lessons? Ü in sch?
ham Y she dun believe leh? I tot i told her it's true already. I thk she muz c us tog then she believe.
ham Oh did you charge camera
ham I‘ve got some salt, you can rub it in my open wounds if you like!
ham Now i'm going for lunch.
ham I'm in school now n i'll be in da lab doing some stuff give me a call when ü r done.
ham Oh k. . I will come tomorrow
ham Aight, text me tonight and we'll see what's up
ham U 2.
ham Water logging in desert. Geoenvironmental implications.
ham Raji..pls do me a favour. Pls convey my Birthday wishes to Nimya. Pls. Today is her birthday.
ham Company is very good.environment is terrific and food is really nice:)
ham Very strange. and are watching the 2nd one now but i'm in bed. Sweet dreams, miss u
spam SMS AUCTION - A BRAND NEW Nokia 7250 is up 4 auction today! Auction is FREE 2 join & take part! Txt NOKIA to 86021 now!
ham Hi hope u r both ok, he said he would text and he hasn't, have u seen him, let me down gently please
ham Babe! I fucking love you too !! You know? Fuck it was so good to hear your voice. I so need that. I crave it. I can't get enough. I adore you, Ahmad *kisses*
ham K sure am in my relatives home. Sms me de. Pls:-)
ham I sent them. Do you like?
ham Fuuuuck I need to stop sleepin, sup
ham I'm in town now so i'll jus take mrt down later.
ham I just cooked a rather nice salmon a la you
ham I uploaded mine to Facebook
ham WHAT TIME U WRKIN?
ham Okie
spam ree entry in 2 a weekly comp for a chance to win an ipod. Txt POD to 80182 to get entry (std txt rate) T&C's apply 08452810073 for details 18+
spam Our records indicate u maybe entitled to 5000 pounds in compensation for the Accident you had. To claim 4 free reply with CLAIM to this msg. 2 stop txt STOP
ham Sorry, I'll call later
ham Oh oh... Den muz change plan liao... Go back have to yan jiu again...
ham It's wylie, you in tampa or sarasota?
ham Ok... Take ur time n enjoy ur dinner...
ham Darren was saying dat if u meeting da ge den we dun meet 4 dinner. Cos later u leave xy will feel awkward. Den u meet him 4 lunch lor.
spam Spook up your mob with a Halloween collection of a logo & pic message plus a free eerie tone, txt CARD SPOOK to 8007 zed 08701417012150p per logo/pic
ham I like cheap! But i‘m happy to splash out on the wine if it makes you feel better..
ham She.s fine. I have had difficulties with her phone. It works with mine. Can you pls send her another friend request.
ham Ugh my leg hurts. Musta overdid it on mon.
spam Call Germany for only 1 pence per minute! Call from a fixed line via access number 0844 861 85 85. No prepayment. Direct access! www.telediscount.co.uk
spam YOU VE WON! Your 4* Costa Del Sol Holiday or £5000 await collection. Call 09050090044 Now toClaim. SAE, TC s, POBox334, Stockport, SK38xh, Cost£1.50/pm, Max10mins
ham WOT STUDENT DISCOUNT CAN U GET ON BOOKS?
ham Me fine..absolutly fine
ham How come she can get it? Should b quite diff to guess rite...
spam Had your mobile 11mths ? Update for FREE to Oranges latest colour camera mobiles & unlimited weekend calls. Call Mobile Upd8 on freefone 08000839402 or 2StopTxt
ham I will reach ur home in <#> minutes
ham Babe, I'm answering you, can't you see me ? Maybe you'd better reboot YM ... I got the photo ... It's great !
ham Hi.what you think about match?
ham I know you are thinkin malaria. But relax, children cant handle malaria. She would have been worse and its gastroenteritis. If she takes enough to replace her loss her temp will reduce. And if you give her malaria meds now she will just vomit. Its a self limiting illness she has which means in a few days it will completely stop
ham Dai i downloaded but there is only exe file which i can only run that exe after installing.
ham It is only yesterday true true.
ham K.k.how is your business now?
ham 3 pa but not selected.
spam Natalja (25/F) is inviting you to be her friend. Reply YES-440 or NO-440 See her: www.SMS.ac/u/nat27081980 STOP? Send STOP FRND to 62468
ham I keep ten rs in my shelf:) buy two egg.
ham I am late. I will be there at
ham Well thats nice. Too bad i cant eat it
ham I accidentally brought em home in the box
ham Pls she needs to dat slowly or she will vomit more.
ham I have to take exam with in march 3
ham Jane babes not goin 2 wrk, feel ill after lst nite. Foned in already cover 4 me chuck.:-)
ham 5 nights...We nt staying at port step liao...Too ex
ham If I die I want u to have all my stuffs.
ham "OH FUCK. JUSWOKE UP IN A BED ON A BOATIN THE DOCKS. SLEPT WID 25 YEAR OLD. SPINOUT! GIV U DA GOSSIP L8R. XXX"
ham Smile in Pleasure Smile in Pain Smile when trouble pours like Rain Smile when sum1 Hurts U Smile becoz SOMEONE still Loves to see u Smiling!!
ham Prabha..i'm soryda..realy..frm heart i'm sory
ham I re-met alex nichols from middle school and it turns out he's dealing!
spam PRIVATE! Your 2003 Account Statement for <fone no> shows 800 un-redeemed S. I. M. points. Call 08715203656 Identifier Code: 42049 Expires 26/10/04
ham It means u could not keep ur words.
ham Nope, I'm still in the market
ham I realise you are a busy guy and i'm trying not to be a bother. I have to get some exams outta the way and then try the cars. Do have a gr8 day
spam YOU ARE CHOSEN TO RECEIVE A £350 AWARD! Pls call claim number 09066364311 to collect your award which you are selected to receive as a valued mobile customer.
ham Hey what how about your project. Started aha da.
ham Ok cool. See ya then.
ham Am on the uworld site. Am i buying the qbank only or am i buying it with the self assessment also?
ham Your opinion about me? 1. Over 2. Jada 3. Kusruthi 4. Lovable 5. Silent 6. Spl character 7. Not matured 8. Stylish 9. Simple Pls reply..
spam Someonone you know is trying to contact you via our dating service! To find out who it could be call from your mobile or landline 09064015307 BOX334SK38ch
ham Yeah I can still give you a ride
ham Jay wants to work out first, how's 4 sound?
ham Gud gud..k, chikku tke care.. sleep well gud nyt
ham Its a part of checking IQ
ham Hmm thinking lor...
ham Of course ! Don't tease me ... You know I simply must see ! *grins* ... Do keep me posted my prey ... *loving smile* *devouring kiss*
ham thanks for the temales it was wonderful. Thank. Have a great week.
ham Thank you princess! I want to see your nice juicy booty...
ham Haven't eaten all day. I'm sitting here staring at this juicy pizza and I can't eat it. These meds are ruining my life.
ham Gud ni8 dear..slp well..take care..swt dreams..Muah..
ham U come n search tat vid..not finishd..
ham K I'm leaving soon, be there a little after 9
spam Urgent! Please call 09061213237 from a landline. £5000 cash or a 4* holiday await collection. T &Cs SAE PO Box 177 M227XY. 16+
ham Yeah work is fine, started last week, all the same stuff as before, dull but easy and guys are fun!
ham You do your studies alone without anyones help. If you cant no need to study.
ham Please tell me not all of my car keys are in your purse
ham I didnt get anything da
ham Ok... Sweet dreams...
ham Well she's in for a big surprise!
ham As usual..iam fine, happy & doing well..:)
ham 1 in cbe. 2 in chennai.
ham Can help u swoop by picking u up from wherever ur other birds r meeting if u want.
ham If anyone calls for a treadmill say you'll buy it. Make sure its working. I found an ad on Craigslist selling for $ <#> .
ham I absolutely LOVE South Park! I only recently started watching the office.
ham Did you see that film:)
ham Pls speak with me. I wont ask anything other then you friendship.
ham Storming msg: Wen u lift d phne, u say "HELLO" Do u knw wt is d real meaning of HELLO?? . . . It's d name of a girl..! . . . Yes.. And u knw who is dat girl?? "Margaret Hello" She is d girlfrnd f Grahmbell who invnted telphone... . . . . Moral:One can 4get d name of a person, bt not his girlfrnd... G o o d n i g h t . . .@
ham Gud ni8.swt drms.take care
ham HI DARLIN ITS KATE ARE U UP FOR DOIN SOMETHIN TONIGHT? IM GOING TO A PUB CALLED THE SWAN OR SOMETHING WITH MY PARENTS FOR ONE DRINK SO PHONE ME IF U CAN
ham Anything lar then ü not going home 4 dinner?
ham "ER, ENJOYIN INDIANS AT THE MO..yeP. SaLL gOoD HehE ;> hows bout u shexy? Pete Xx"
spam If you don't, your prize will go to another customer. T&C at www.t-c.biz 18+ 150p/min Polo Ltd Suite 373 London W1J 6HL Please call back if busy
ham Did u fix the teeth?if not do it asap.ok take care.
ham So u wan 2 come for our dinner tonight a not?
ham Hello.How u doing?What u been up 2?When will u b moving out of the flat, cos I will need to arrange to pick up the lamp, etc. Take care. Hello caroline!
ham Its too late:)but its k.wish you the same.
ham Hi. Hope ur day * good! Back from walk, table booked for half eight. Let me know when ur coming over.
ham Oh yeah clearly it's my fault
ham Dunno leh cant remember mayb lor. So wat time r we meeting tmr?
ham Best msg: It's hard to be with a person, when u know that one more step foward will make u fall in love.. & One step back can ruin ur friendship.. good night:-) ...
spam URGENT! Your Mobile number has been awarded with a £2000 prize GUARANTEED. Call 09061790126 from land line. Claim 3030. Valid 12hrs only 150ppm
ham Helloooo... Wake up..! "Sweet" "morning" "welcomes" "You" "Enjoy" "This Day" "with full of joy".. "GUD MRNG".
ham Vikky, come around <TIME> ..
ham And how you will do that, princess? :)
ham I have gone into get info bt dont know what to do
ham Yeah, probably here for a while
ham Sent me ur email id soon
spam URGENT! You have won a 1 week FREE membership in our £100,000 Prize Jackpot! Txt the word: CLAIM to No: 81010 T&C www.dbuk.net LCCLTD POBOX 4403LDNW1A7RW18
ham I'm still pretty weak today .. Bad day ?
ham Hey ! Don't forget ... You are MINE ... For ME ... My possession ... MY property ... MMM ... *childish smile* ...
ham An excellent thought by a misundrstud frnd: I knw u hate me bt the day wen u'll knw the truth u'll hate urself:-( Gn:-)
ham Hey! Congrats 2u2. id luv 2 but ive had 2 go home!
ham Dear where you. Call me
ham Xy trying smth now. U eat already? We havent...
spam Urgent! Please call 09061213237 from landline. £5000 cash or a luxury 4* Canary Islands Holiday await collection. T&Cs SAE PO Box 177. M227XY. 150ppm. 16+
ham I donno its in your genes or something
spam XMAS iscoming & ur awarded either £500 CD gift vouchers & free entry 2 r £100 weekly draw txt MUSIC to 87066 TnC www.Ldew.com1win150ppmx3age16subscription
ham Alex says he's not ok with you not being ok with it
ham Are u coming to the funeral home
ham My darling sister. How are you doing. When's school resuming. Is there a minimum wait period before you reapply? Do take care
ham I.ll hand her my phone to chat wit u
ham Well good morning mr . Hows london treatin' ya treacle?
ham I can't make it tonight
ham At WHAT TIME should i come tomorrow
ham About <#> bucks. The banks fees are fixed. Better to call the bank and find out.
ham I can. But it will tell quite long, cos i haven't finish my film yet...
ham Pls ask macho how much is budget for bb bold 2 is cos i saw a new one for <#> dollars.
ham "Hi missed your Call and my mumHas beendropping red wine all over theplace! what is your adress?"
ham Ill be at yours in about 3 mins but look out for me
ham What you did in leave.
ham I'm coming back on Thursday. Yay. Is it gonna be ok to get the money. Cheers. Oh yeah and how are you. Everything alright. Hows school. Or do you call it work now
ham Jolly good! By the way, will give u tickets for sat eve 7.30. Speak before then x
ham yeah, that's what I was thinking
ham K.k:)i'm going to tirunelvali this week to see my uncle ..i already spend the amount by taking dress .so only i want money.i will give it on feb 1
ham Here got ur favorite oyster... N got my favorite sashimi... Ok lar i dun say already... Wait ur stomach start rumbling...
ham My sister going to earn more than me da.
spam Get the official ENGLAND poly ringtone or colour flag on yer mobile for tonights game! Text TONE or FLAG to 84199. Optout txt ENG STOP Box39822 W111WX £1.50
ham Hahaha..use your brain dear
ham Jus finish watching tv... U?
ham K, fyi I'm back in my parents' place in south tampa so I might need to do the deal somewhere else
ham Good morning, my Love ... I go to sleep now and wish you a great day full of feeling better and opportunity ... You are my last thought babe, I LOVE YOU *kiss*
ham Kothi print out marandratha.
ham But we havent got da topic yet rite?
ham Ok no problem... Yup i'm going to sch at 4 if i rem correctly...
ham Thanks, I'll keep that in mind
ham Aah bless! How's your arm?
ham Dear Sir,Salam Alaikkum.Pride and Pleasure meeting you today at the Tea Shop.We are pleased to send you our contact number at Qatar.Rakhesh an Indian.Pls save our Number.Respectful Regards.
ham Gal n boy walking in d park. gal-can i hold ur hand? boy-y? do u think i would run away? gal-no, jst wana c how it feels walking in heaven with an prince..GN:-)
ham What makes you most happy?
ham Wishing you a wonderful week.
ham Sweet heart how are you?
ham Sir, waiting for your letter.
ham Dude im no longer a pisces. Im an aquarius now.
ham X course it 2yrs. Just so her messages on messenger lik you r sending me
ham I think steyn surely get one wicket:)
ham Neither [in sterm voice] - i'm studying. All fine with me! Not sure the thing will be resolved, tho. Anyway. Have a fab hols
ham Garbage bags, eggs, jam, bread, hannaford wheat chex
ham No. It's not pride. I'm almost <#> years old and shouldn't be takin money from my kid. You're not supposed to have to deal with this stuff. This is grownup stuff--why i don't tell you.
ham Sounds better than my evening im just doing my costume. Im not sure what time i finish tomorrow but i will txt you at the end.
ham My birthday is on feb <#> da. .
ham So when do you wanna gym?
ham You'd like that wouldn't you? Jerk!
ham Are u awake? Is there snow there?
ham And of course you should make a stink!
spam u r subscribed 2 TEXTCOMP 250 wkly comp. 1st wk?s free question follows, subsequent wks charged@150p/msg.2 unsubscribe txt STOP 2 84128,custcare 08712405020
ham No go. No openings for that room 'til after thanksgiving without an upcharge.
ham When you guys planning on coming over?
ham Wat ü doing now?
ham My Parents, My Kidz, My Friends n My Colleagues. All screaming.. SURPRISE !! and I was waiting on the sofa.. ... ..... ' NAKED...!
ham No sir. That's why i had an 8-hr trip on the bus last week. Have another audition next wednesday but i think i might drive this time.
ham Do I? I thought I put it back in the box
ham I'm home...
ham No one interested. May be some business plan.
ham Yup it's at paragon... I havent decided whether 2 cut yet... Hee...
ham Good morning princess! Have a great day!
ham Guai... Ü shd haf seen him when he's naughty... Ü so free today? Can go jogging...
ham Aiyo cos i sms ü then ü neva reply so i wait 4 ü to reply lar. I tot ü havent finish ur lab wat.
ham Living is very simple.. Loving is also simple.. Laughing is too simple.. Winning is tooo simple.. But, Being 'SIMPLE' is very difficult...;-) :-)
ham Tell me something. Thats okay.
ham Ok
ham Hmm. Shall i bring a bottle of wine to keep us amused? Just joking! I'll still bring a bottle. Red or white? See you tomorrow
ham This is ur face test ( 1 2 3 4 5 6 7 8 9 <#> ) select any number i will tell ur face astrology.... am waiting. quick reply...
ham Hey, iouri gave me your number, I'm wylie, ryan's friend
ham Yep get with the program. You're slacking.
ham I'm in inside office..still filling forms.don know when they leave me.
ham I think your mentor is , but not 100 percent sure.
spam Call 09095350301 and send our girls into erotic ecstacy. Just 60p/min. To stop texts call 08712460324 (nat rate)
spam Camera - You are awarded a SiPix Digital Camera! call 09061221066 fromm landline. Delivery within 28 days.
spam A £400 XMAS REWARD IS WAITING FOR YOU! Our computer has randomly picked you from our loyal mobile customers to receive a £400 reward. Just call 09066380611
ham Just trying to figure out when I'm suppose to see a couple different people this week. We said we'd get together but I didn't set dates
spam IMPORTANT MESSAGE. This is a final contact attempt. You have important messages waiting out our customer claims dept. Expires 13/4/04. Call 08717507382 NOW!
ham Hi mom we might be back later than <#>
spam dating:i have had two of these. Only started after i sent a text to talk sport radio last week. Any connection do you think or coincidence?
ham Lol, oh you got a friend for the dog ?
ham Ok., is any problem to u frm him? Wats matter?
ham K I'll head out in a few mins, see you there
ham Do u konw waht is rael FRIENDSHIP Im gving yuo an exmpel: Jsut ese tihs msg.. Evrey splleing of tihs msg is wrnog.. Bt sitll yuo can raed it wihtuot ayn mitsake.. GOODNIGHT & HAVE A NICE SLEEP..SWEET DREAMS..
ham I cant pick the phone right now. Pls send a message
ham I don't want you to leave. But i'm barely doing what i can to stay sane. fighting with you constantly isn't helping.
spam The current leading bid is 151. To pause this auction send OUT. Customer Care: 08718726270
spam Free entry to the gr8prizes wkly comp 4 a chance to win the latest Nokia 8800, PSP or £250 cash every wk.TXT GREAT to 80878 http//www.gr8prizes.com 08715705022
ham Somebody set up a website where you can play hold em using eve online spacebucks
ham Its sunny in california. The weather's just cool
spam You have 1 new message. Call 0207-083-6089
ham I can make it up there, squeezed <#> bucks out of my dad
ham Good day to You too.Pray for me.Remove the teeth as its painful maintaining other stuff.
ham How are you babes. Hope your doing ok. I had a shit nights sleep. I fell asleep at 5.Im knackered and im dreading work tonight. What are thou upto tonight. X
ham How do friends help us in problems? They give the most stupid suggestion that Lands us into another problem and helps us forgt the previous problem
ham I'm at work. Please call
ham I will be gentle baby! Soon you will be taking all <#> inches deep inside your tight pussy...
ham NOT MUCH NO FIGHTS. IT WAS A GOOD NITE!!
ham Ok.ok ok..then..whats ur todays plan
ham Nt joking seriously i told
ham Watching ajith film ah?
ham Ooooooh I forgot to tell u I can get on yoville on my phone
ham All done, all handed in. Don't know if mega shop in asda counts as celebration but thats what i'm doing!
ham I dont know exactly could you ask chechi.
ham Dunno lei shd b driving lor cos i go sch 1 hr oni.
ham As in i want custom officer discount oh.
ham That's necessarily respectful
ham Hi. Hope you had a good day. Have a better night.
ham And he's apparently bffs with carly quick now
ham HARD BUT TRUE: How much you show & express your love to someone....that much it will hurt when they leave you or you get seperated...!鈥┾??〨ud evening...
ham Babes I think I got ur brolly I left it in English wil bring it in 2mrw 4 u luv Franxx
ham Hi babe its me thanks for coming even though it didnt go that well!i just wanted my bed! Hope to see you soon love and kisses xxx
ham So gd got free ice cream... I oso wan...
ham Pls give her prometazine syrup. 5mls then <#> mins later feed.
ham So how many days since then?
ham Dear are you angry i was busy dear
ham Yup he msg me: is tat yijue? Then i tot it's my group mate cos we meeting today mah... I'm askin if ü leaving earlier or wat mah cos mayb ü haf to walk v far...
ham ... Are you in the pub?
ham There is a first time for everything :)
ham Daddy, shu shu is looking 4 u... U wan me 2 tell him u're not in singapore or wat?
ham I ask if u meeting da ge tmr nite...
ham Gr8. So how do you handle the victoria island traffic. Plus when's the album due
ham Nite nite pocay wocay luv u more than n e thing 4eva I promise ring u 2morrowxxxx
ham East coast
ham You should get more chicken broth if you want ramen unless there's some I don't know about
ham My slave! I want you to take 2 or 3 pictures of yourself today in bright light on your cell phone! Bright light!
ham Nope. I just forgot. Will show next week
ham So how are you really. What are you up to. How's the masters. And so on.
ham I'm at bruce & fowler now but I'm in my mom's car so I can't park (long story)
ham I dont know oh. Hopefully this month.
ham Hi elaine, is today's meeting confirmed?
ham Ok k..sry i knw 2 siva..tats y i askd..
ham Sorry, I'll call later
ham U horrible gal... U knew dat i was going out wif him yest n u still come n ask me...
ham Otherwise had part time job na-tuition..
ham Oh yeah! And my diet just flew out the window
spam Santa Calling! Would your little ones like a call from Santa Xmas eve? Call 09058094583 to book your time.
ham You didnt complete your gist oh.
ham Er yeah, i will b there at 15:26, sorry! Just tell me which pub/cafe to sit in and come wen u can
ham If you can make it any time tonight or whenever you can it's cool, just text me whenever you're around
ham If I was I wasn't paying attention
ham Thanx a lot 4 ur help!
ham You're gonna have to be way more specific than that
ham Jesus armand really is trying to tell everybody he can find
ham I'm wif him now buying tix lar...
ham Mode men or have you left.
ham Am slow in using biola's fne
ham "What are youdoing later? Sar xxx"
ham Hey i've booked the 2 lessons on sun liao...
ham Thank you. do you generally date the brothas?
ham By the way, make sure u get train to worc foregate street not shrub hill. Have fun night x
ham I thought i'd get him a watch, just cos thats the kind of thing u get4an18th. And he loves so much!
spam You have won a guaranteed 32000 award or maybe even £1000 cash to claim ur award call free on 0800 ..... (18+). Its a legitimat efreefone number wat do u think???
ham Good morning. At the repair shop--the ONLY reason i'm up at this hour.
ham And that's fine, I got enough bud to last most of the night at least
ham I am back. Good journey! Let me know if you need any of the receipts. Shall i tell you like the pendent?
ham So that takes away some money worries
ham aight we can pick some up, you open before tonight?
spam Latest News! Police station toilet stolen, cops have nothing to go on!
ham Sac needs to carry on:)
ham Just sing HU. I think its also important to find someone female that know the place well preferably a citizen that is also smart to help you navigate through. Even things like choosing a phone plan require guidance. When in doubt ask especially girls.
ham What???? Hello wats talks email address?
ham Except theres a chick with huge boobs.
ham Im just wondering what your doing right now?
ham Wishing you a beautiful day. Each moment revealing even more things to keep you smiling. Do enjoy it.
spam "For the most sparkling shopping breaks from 45 per person; call 0121 2025050 or visit www.shortbreaks.org.uk"
ham Arun can u transfr me d amt
ham Sorry, I'll call later
ham If you hear a loud scream in about <#> minutes its cause my Gyno will be shoving things up me that don't belong :/
spam December only! Had your mobile 11mths+? You are entitled to update to the latest colour camera mobile for Free! Call The Mobile Update Co FREE on 08002986906
ham Ok i thk i got it. Then u wan me 2 come now or wat?
spam Txt: CALL to No: 86888 & claim your reward of 3 hours talk time to use from your phone now! Subscribe6GBP/mnth inc 3hrs 16 stop?txtStop www.gamb.tv
ham U GOIN OUT 2NITE?
ham I will treasure every moment we spend together...
ham Shall I bring us a bottle of wine to keep us amused? Only joking! I‘ll bring one anyway
spam http//tms. widelive.com/index. wml?id=820554ad0a1705572711&first=true¡C C Ringtone¡
spam Get your garden ready for summer with a FREE selection of summer bulbs and seeds worth £33:50 only with The Scotsman this Saturday. To stop go2 notxt.co.uk
spam URGENT! Last weekend's draw shows that you have won £1000 cash or a Spanish holiday! CALL NOW 09050000332 to claim. T&C: RSTM, SW7 3SS. 150ppm
ham Ok lor.
ham I thought slide is enough.
ham Yup
ham Well obviously not because all the people in my cool college life went home ;_;
ham Ok lor ü reaching then message me.
ham Where's mummy's boy ? Is he being good or bad ? Is he being positive or negative ? Why is mummy being made to wait? Hmmmm?
ham Dhoni have luck to win some big title.so we will win:)
ham Yes princess! I want to please you every night. Your wish is my command...
ham What Today-sunday..sunday is holiday..so no work..
ham No probably <#> %.
ham Really do hope the work doesnt get stressful. Have a gr8 day.
ham Have you seen who's back at Holby?!
ham Shall call now dear having food
spam URGENT We are trying to contact you Last weekends draw shows u have won a £1000 prize GUARANTEED Call 09064017295 Claim code K52 Valid 12hrs 150p pm
ham So li hai... Me bored now da lecturer repeating last weeks stuff waste time...
ham , , and picking them up from various points | going 2 yeovil | and they will do the motor project 4 3 hours | and then u take them home. || 12 2 5.30 max. || Very easy
ham Also fuck you and your family for going to rhode island or wherever the fuck and leaving me all alone the week I have a new bong >:(
ham Ofcourse I also upload some songs
spam 2p per min to call Germany 08448350055 from your BT line. Just 2p per min. Check PlanetTalkInstant.com for info & T's & C's. Text stop to opt out
ham K. I will sent it again
ham Oh thanks a lot..i already bought 2 eggs ..
ham K. I will sent it again
ham U studying in sch or going home? Anyway i'll b going 2 sch later.
spam Marvel Mobile Play the official Ultimate Spider-man game (£4.50) on ur mobile right now. Text SPIDER to 83338 for the game & we ll send u a FREE 8Ball wallpaper
ham I think if he rule tamilnadu..then its very tough for our people.
ham Cool, we shall go and see, have to go to tip anyway. Are you at home, got something to drop in later? So lets go to town tonight! Maybe mum can take us in.
ham Good afternoon, my love ... How goes your day ? How did you sleep ? I hope your well, my boytoy ... I think of you ...
ham Yes... I trust u to buy new stuff ASAP so I can try it out
spam SMS SERVICES. for your inclusive text credits, pls goto www.comuk.net login= 3qxj9 unsubscribe with STOP, no extra charge. help 08702840625.COMUK. 220-CM2 9AE
ham Why did I wake up on my own >:(
ham Now get step 2 outta the way. Congrats again.
ham Love has one law; Make happy the person you love. In the same way friendship has one law; Never make ur friend feel alone until you are alive.... Gud night
spam PRIVATE! Your 2003 Account Statement for 07808247860 shows 800 un-redeemed S. I. M. points. Call 08719899229 Identifier Code: 40411 Expires 06/11/04
ham Apo all other are mokka players only
ham Perhaps * is much easy give your account identification, so i will tomorrow at UNI
ham Wait . I will msg after <#> min.
ham What i told before i tell. Stupid hear after i wont tell anything to you. You dad called to my brother and spoken. Not with me.
ham God's love has no limit. God's grace has no measure. God's power has no boundaries. May u have God's endless blessings always in ur life...!! Gud ni8
ham I want to be inside you every night...
ham Machan you go to gym tomorrow, i wil come late goodnight.
ham Lol they were mad at first but then they woke up and gave in.
ham I went to project centre
ham It‘s reassuring, in this crazy world.
ham Just making dinner, you ?
ham Yes. Please leave at <#> . So that at <#> we can leave
ham Oh... Okie lor...We go on sat...
ham You are a great role model. You are giving so much and i really wish each day for a miracle but God as a reason for everything and i must say i wish i knew why but i dont. I've looked up to you since i was young and i still do. Have a great day.
ham Ya, i'm referin to mei's ex wat... No ah, waitin 4 u to treat, somebody shld b rich liao...So gd, den u dun have to work frm tmr onwards...
ham Miles and smiles r made frm same letters but do u know d difference..? smile on ur face keeps me happy even though I am miles away from u.. :-)keep smiling.. Good nyt
ham By the way, i've put a skip right outside the front of the house so you can see which house it is. Just pull up before it.
ham Can you pls send me that company name. In saibaba colany
ham No. I dont want to hear anything
ham You are a big chic. Common. Declare
ham Thats cool. I want to please you...
ham Going to join tomorrow.
spam You are awarded a SiPix Digital Camera! call 09061221061 from landline. Delivery within 28days. T Cs Box177. M221BP. 2yr warranty. 150ppm. 16 . p p£3.99
ham I want to tell you how bad I feel that basically the only times I text you lately are when I need drugs
spam PRIVATE! Your 2003 Account Statement for shows 800 un-redeemed S.I.M. points. Call 08718738001 Identifier Code: 49557 Expires 26/11/04
ham Total disappointment, when I texted you was the craziest shit got :(
ham Its just the effect of irritation. Just ignore it
ham What about this one then.
ham I think that tantrum's finished so yeah I'll be by at some point
ham Compliments to you. Was away from the system. How your side.
ham happened here while you were adventuring
ham Hey chief, can you give me a bell when you get this. Need to talk to you about this royal visit on the 1st june.
ham Ok which your another number
ham I know you are thinkin malaria. But relax, children cant handle malaria. She would have been worse and its gastroenteritis. If she takes enough to replace her loss her temp will reduce. And if you give her malaria meds now she will just vomit. Its a self limiting illness she has which means in a few days it will completely stop
ham Aiyah ok wat as long as got improve can already wat...
spam Want explicit SEX in 30 secs? Ring 02073162414 now! Costs 20p/min Gsex POBOX 2667 WC1N 3XX
ham I can't believe how attached I am to seeing you every day. I know you will do the best you can to get to me babe. I will go to teach my class at your midnight
ham Just sleeping..and surfing
spam ASKED 3MOBILE IF 0870 CHATLINES INCLU IN FREE MINS. INDIA CUST SERVs SED YES. L8ER GOT MEGA BILL. 3 DONT GIV A SHIT. BAILIFF DUE IN DAYS. I O £250 3 WANT £800
ham Yeah it's jus rite...
ham Armand says get your ass over to epsilon
ham U still havent got urself a jacket ah?
ham I'm taking derek & taylor to walmart, if I'm not back by the time you're done just leave the mouse on my desk and I'll text you when priscilla's ready
ham Hi its in durban are you still on this number
ham Ic. There are a lotta childporn cars then.
spam Had your contract mobile 11 Mnths? Latest Motorola, Nokia etc. all FREE! Double Mins & Text on Orange tariffs. TEXT YES for callback, no to remove from records.
ham No, I was trying it all weekend ;V
ham You know, wot people wear. T shirts, jumpers, hat, belt, is all we know. We r at Cribbs
ham Cool, what time you think you can get here?
ham Wen did you get so spiritual and deep. That's great
ham Have a safe trip to Nigeria. Wish you happiness and very soon company to share moments with
ham Hahaha..use your brain dear
ham Well keep in mind I've only got enough gas for one more round trip barring a sudden influx of cash
ham Yeh. Indians was nice. Tho it did kane me off a bit he he. We shud go out 4 a drink sometime soon. Mite hav 2 go 2 da works 4 a laugh soon. Love Pete x x
ham Yes i have. So that's why u texted. Pshew...missing you so much
ham No. I meant the calculation is the same. That <#> units at <#> . This school is really expensive. Have you started practicing your accent. Because its important. And have you decided if you are doing 4years of dental school or if you'll just do the nmde exam.
ham Sorry, I'll call later
ham if you aren't here in the next <#> hours imma flip my shit
ham Anything lor. Juz both of us lor.
ham Get me out of this dump heap. My mom decided to come to lowes. BORING.
ham Ok lor... Sony ericsson salesman... I ask shuhui then she say quite gd 2 use so i considering...
ham Ard 6 like dat lor.
ham Why don't you wait 'til at least wednesday to see if you get your .
ham Huh y lei...
spam REMINDER FROM O2: To get 2.50 pounds free call credit and details of great offers pls reply 2 this text with your valid name, house no and postcode
spam This is the 2nd time we have tried 2 contact u. U have won the £750 Pound prize. 2 claim is easy, call 087187272008 NOW1! Only 10p per minute. BT-national-rate.
ham Will ü b going to esplanade fr home?
ham Pity, * was in mood for that. So...any other suggestions?
ham The guy did some bitching but I acted like i'd be interested in buying something else next week and he gave it to us for free
ham Rofl. Its true to its name
import pandas as pd
# Dataset available using filepath 'smsspamcollection/SMSSpamCollection'
df = pd.read_table("smsspamcollection/SMSSpamCollection", names=['label', 'sms_message'] )
# Output printing out first 5 rows
df[:5]_____no_output_____
</code>
### Step 1.2: Data Preprocessing ###
Now that we have a basic understanding of what our dataset looks like, let's convert our labels to binary variables: 0 to represent 'ham' (i.e. not spam) and 1 to represent 'spam', for ease of computation.
You might be wondering why we need this step. The answer lies in how scikit-learn handles inputs. Scikit-learn works best with numerical values, so if we left our label values as strings the conversion would have to happen implicitly somewhere downstream, which makes the behaviour harder to reason about.
Our model would still be able to make predictions if we left our labels as strings, but we could run into issues later when calculating performance metrics, for example our precision and recall scores. Hence, to avoid unexpected 'gotchas' later, it is good practice to feed our categorical values into the model as integers. _____no_output_____>**Instructions:**
* Convert the values in the 'label' column to numerical values using map method as follows:
{'ham':0, 'spam':1} This maps the 'ham' value to 0 and the 'spam' value to 1.
* Also, to get an idea of the size of the dataset we are dealing with, print out number of rows and columns using
'shape'._____no_output_____
<code>
'''
Solution
'''
df['label'] = df.label.map({'ham': 0, 'spam': 1})
print(df.shape)
_____no_output_____
</code>
### Step 2.1: Bag of Words ###
What we have here in our data set is a large collection of text data (5,572 rows of data). Most ML algorithms rely on numerical data to be fed into them as input, and email/sms messages are usually text heavy.
Here we'd like to introduce the Bag of Words (BoW) concept, a term for problems that work with a 'bag of words', that is, a collection of text data. The basic idea of BoW is to take a piece of text and count the frequency of the words in that text. It is important to note that the BoW concept treats each word individually; the order in which the words occur does not matter.
Using a process which we will go through now, we can convert a collection of documents to a matrix, with each document being a row, each word (token) being a column, and the corresponding (row, column) value being the frequency of occurrence of that word or token in that document.
For example:
Let's say we have 4 documents, which are text messages
in our case, as follows:
`['Hello, how are you!',
'Win money, win from home.',
'Call me now',
'Hello, Call you tomorrow?']`
Our objective here is to convert this set of texts to a frequency distribution matrix, as follows:
<img src="images/countvectorizer.png" height="542" width="542">
Here as we can see, the documents are numbered in the rows, and each word is a column name, with the corresponding value being the frequency of that word in the document.
Let's break this down and see how we can do this conversion using a small set of documents.
To handle this, we will be using sklearn's
[count vectorizer](http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.CountVectorizer.html#sklearn.feature_extraction.text.CountVectorizer) method which does the following:
* It tokenizes the string (separates the string into individual words) and gives an integer ID to each token.
* It counts the occurrence of each of those tokens.
**Please Note:**
* The CountVectorizer method automatically converts all tokenized words to their lower case form so that it does not treat words like 'He' and 'he' differently. It does this using the `lowercase` parameter which is by default set to `True`.
* It also ignores all punctuation so that words followed by a punctuation mark (for example: 'hello!') are not treated differently than the same words not prefixed or suffixed by a punctuation mark (for example: 'hello'). It does this using the `token_pattern` parameter which has a default regular expression which selects tokens of 2 or more alphanumeric characters.
* The third parameter to take note of is the `stop_words` parameter. Stop words refer to the most commonly used words in a language. They include words like 'am', 'an', 'and', 'the', etc. By setting this parameter value to `english`, CountVectorizer will automatically ignore all words (from our input text) that are found in the built in list of English stop words in scikit-learn. This is extremely helpful as stop words can skew our calculations when we are trying to find certain key words that are indicative of spam.
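For orientation, here is a minimal hedged sketch (not part of the original exercise; the variable name `example_vectorizer` is purely illustrative) showing how the three parameters described above can be set explicitly when constructing a vectorizer:
```
from sklearn.feature_extraction.text import CountVectorizer

# Each argument below just makes the behaviour described above explicit.
example_vectorizer = CountVectorizer(
    lowercase=True,                    # fold 'He' and 'he' into the same token (the default)
    token_pattern=r'(?u)\b\w\w+\b',    # keep alphanumeric tokens of 2+ characters, ignoring punctuation (the default)
    stop_words='english'               # drop common English words such as 'am', 'an', 'and', 'the'
)
```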
We will dive into the application of each of these to our model in a later step, but for now it is important to be aware of the preprocessing techniques available to us when dealing with textual data._____no_output_____### Step 2.2: Implementing Bag of Words from scratch ###
Before we dive into scikit-learn's Bag of Words (BoW) library to do the dirty work for us, let's implement it ourselves first so that we can understand what's happening behind the scenes.
**Step 1: Convert all strings to their lower case form.**
Let's say we have a document set:
```
documents = ['Hello, how are you!',
'Win money, win from home.',
'Call me now.',
'Hello, Call hello you tomorrow?']
```
>>**Instructions:**
* Convert all the strings in the documents set to their lower case. Save them into a list called 'lower_case_documents'. You can convert strings to their lower case in python by using the lower() method.
_____no_output_____
<code>
'''
Solution:
'''
documents = ['Hello, how are you!',
'Win money, win from home.',
'Call me now.',
'Hello, Call hello you tomorrow?']
lower_case_documents = [w.lower() for w in documents]
print(lower_case_documents)['hello, how are you!', 'win money, win from home.', 'call me now.', 'hello, call hello you tomorrow?']
</code>
**Step 2: Removing all punctuation**
>>**Instructions:**
Remove all punctuation from the strings in the document set. Save the strings into a list called
'sans_punctuation_documents'. _____no_output_____
<code>
'''
Solution:
'''
import string

sans_punctuation_documents = [d.translate({ord(c): None for c in string.punctuation}) for d in lower_case_documents]
print(sans_punctuation_documents)['hello how are you', 'win money win from home', 'call me now', 'hello call hello you tomorrow']
</code>
**Step 3: Tokenization**
Tokenizing a sentence in a document set means splitting up the sentence into individual words using a delimiter. The delimiter specifies what character we will use to identify the beginning and end of a word. Most commonly, we use a single space as the delimiter character for identifying words, and this is true in our documents in this case also._____no_output_____>>**Instructions:**
Tokenize the strings stored in 'sans_punctuation_documents' using the split() method. Store the final document set
in a list called 'preprocessed_documents'.
_____no_output_____
<code>
'''
Solution:
'''
preprocessed_documents = [d.split() for d in sans_punctuation_documents]
print(preprocessed_documents)[['hello', 'how', 'are', 'you'], ['win', 'money', 'win', 'from', 'home'], ['call', 'me', 'now'], ['hello', 'call', 'hello', 'you', 'tomorrow']]
</code>
**Step 4: Count frequencies**
Now that we have our document set in the required format, we can proceed to counting the occurrence of each word in each document of the document set. We will use the `Counter` method from the Python `collections` library for this purpose.
`Counter` counts the occurrence of each item in the list and returns a dictionary with the key as the item being counted and the corresponding value being the count of that item in the list. _____no_output_____>>**Instructions:**
Using the Counter() method and preprocessed_documents as the input, create a dictionary with the keys being each word in each document and the corresponding values being the frequency of occurrence of that word. Save each Counter dictionary as an item in a list called 'frequency_list'.
_____no_output_____
<code>
'''
Solution
'''
import pprint
from collections import Counter

frequency_list = [Counter(doc) for doc in preprocessed_documents]
pprint.pprint(frequency_list)[Counter({'hello': 1, 'how': 1, 'are': 1, 'you': 1}),
 Counter({'win': 2, 'money': 1, 'from': 1, 'home': 1}),
 Counter({'call': 1, 'me': 1, 'now': 1}),
 Counter({'hello': 2, 'call': 1, 'you': 1, 'tomorrow': 1})]
</code>
Congratulations! You have implemented the Bag of Words process from scratch! As we can see in our previous output, we now have a frequency distribution for each document, which gives a clear view of the text that we are dealing with.
We should now have a solid understanding of what is happening behind the scenes in the `sklearn.feature_extraction.text.CountVectorizer` method of scikit-learn.
We will now implement `sklearn.feature_extraction.text.CountVectorizer` method in the next step._____no_output_____### Step 2.3: Implementing Bag of Words in scikit-learn ###
Now that we have implemented the BoW concept from scratch, let's go ahead and use scikit-learn to do this process in a clean and succinct way. We will use the same document set as we used in the previous step. _____no_output_____
<code>
'''
Here we will look to create a frequency matrix on a smaller document set to make sure we understand how the
document-term matrix generation happens. We have created a sample document set 'documents'.
'''
documents = ['Hello, how are you!',
'Win money, win from home.',
'Call me now.',
'Hello, Call hello you tomorrow?']_____no_output_____
</code>
>>**Instructions:**
Import the sklearn.feature_extraction.text.CountVectorizer method and create an instance of it called 'count_vector'. _____no_output_____
<code>
'''
Solution
'''
from sklearn.feature_extraction.text import CountVectorizer
count_vector = CountVectorizer()
count_vector_____no_output_____
</code>
**Data preprocessing with CountVectorizer()**
In Step 2.2, we implemented a version of the CountVectorizer() method from scratch that entailed cleaning our data first. This cleaning involved converting all of our data to lower case and removing all punctuation marks. CountVectorizer() has certain parameters which take care of these steps for us. They are:
* `lowercase = True`
The `lowercase` parameter has a default value of `True` which converts all of our text to its lower case form.
* `token_pattern = (?u)\\b\\w\\w+\\b`
The `token_pattern` parameter has a default regular expression value of `(?u)\\b\\w\\w+\\b` which ignores all punctuation marks and treats them as delimiters, while accepting alphanumeric strings of length greater than or equal to 2, as individual tokens or words.
* `stop_words`
The `stop_words` parameter, if set to `english` will remove all words from our document set that match a list of English stop words defined in scikit-learn. Considering the small size of our dataset and the fact that we are dealing with SMS messages and not larger text sources like e-mail, we will not use stop words, and we won't be setting this parameter value.
You can take a look at all the parameter values of your `count_vector` object by simply printing out the object as follows:_____no_output_____
<code>
'''
Practice node:
Print the 'count_vector' object which is an instance of 'CountVectorizer()'
'''
# No need to revise this code
print(count_vector)CountVectorizer(analyzer='word', binary=False, decode_error='strict',
dtype=<class 'numpy.int64'>, encoding='utf-8',
input='content',
lowercase=True, max_df=1.0, max_features=None, min_df=1,
ngram_range=(1, 1), preprocessor=None, stop_words=None,
strip_accents=None, token_pattern='(?u)\\b\\w\\w+\\b',
tokenizer=None, vocabulary=None)
</code>
>>**Instructions:**
Fit your document dataset to the CountVectorizer object you have created using fit(), and get the list of words
which have been categorized as features using the get_feature_names() method._____no_output_____
<code>
'''
Solution:
'''
# No need to revise this code
count_vector.fit(documents)
count_vector.get_feature_names()_____no_output_____
</code>
The `get_feature_names()` method returns our feature names for this dataset, which is the set of words that make up our vocabulary for 'documents'._____no_output_____>>**Instructions:**
Create a matrix with each row representing one of the 4 documents, and each column representing a word (feature name).
Each value in the matrix will represent the frequency of the word in that column occurring in the particular document in that row.
You can do this using the transform() method of CountVectorizer, passing in the document data set as the argument. The transform() method returns a matrix of NumPy integers, which you can convert to an array using
toarray(). Call the array 'doc_array'.
_____no_output_____
<code>
'''
Solution
'''
doc_array = count_vector.transform(documents).toarray()
doc_array_____no_output_____
</code>
Now we have a clean representation of the documents in terms of the frequency distribution of the words in them. To make it easier to understand our next step is to convert this array into a dataframe and name the columns appropriately._____no_output_____>>**Instructions:**
Convert the 'doc_array' we created into a dataframe, with the column names as the words (feature names). Call the dataframe 'frequency_matrix'.
_____no_output_____
<code>
'''
Solution
'''
import pandas as pd
frequency_matrix = pd.DataFrame(doc_array, columns=count_vector.get_feature_names())
frequency_matrix_____no_output_____
</code>
Congratulations! You have successfully implemented a Bag of Words problem for a document dataset that we created.
One potential issue that can arise from using this method is that if our dataset of text is extremely large (say if we have a large collection of news articles or email data), there will be certain values that are more common than others simply due to the structure of the language itself. For example, words like 'is', 'the', 'an', pronouns, grammatical constructs, etc., could skew our matrix and affect our analysis.
There are a couple of ways to mitigate this. One way is to use the `stop_words` parameter and set its value to `english`. This will automatically ignore all the words in our input text that are found in a built-in list of English stop words in scikit-learn.
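As a quick illustrative sketch (not part of the original exercise; `stopword_vector` and `stopword_matrix` are just illustrative names, and `get_feature_names()` matches the scikit-learn version used in this notebook), this is what enabling the built-in stop word list looks like on the same toy `documents` set:
```
from sklearn.feature_extraction.text import CountVectorizer

# Hedged sketch: same toy documents, but common English words are dropped before counting.
stopword_vector = CountVectorizer(stop_words='english')
stopword_matrix = stopword_vector.fit_transform(documents)
print(stopword_vector.get_feature_names())  # stop words such as 'how', 'are' and 'you' no longer appear
```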
Another way of mitigating this is by using the [tfidf](http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.TfidfVectorizer.html#sklearn.feature_extraction.text.TfidfVectorizer) method. This method is out of scope for the context of this lesson._____no_output_____### Step 3.1: Training and testing sets ###
Now that we understand how to use the Bag of Words approach, we can return to our original, larger UCI dataset and proceed with our analysis. Our first step is to split our dataset into a training set and a testing set so we can first train, and then test our model. _____no_output_____
>>**Instructions:**
Split the dataset into a training and testing set using the train_test_split method in sklearn, and print out the number of rows we have in each of our training and testing data. Split the data
using the following variables:
* `X_train` is our training data for the 'sms_message' column.
* `y_train` is our training data for the 'label' column
* `X_test` is our testing data for the 'sms_message' column.
* `y_test` is our testing data for the 'label' column.
_____no_output_____
<code>
'''
Solution
'''
# split into training and testing sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(df['sms_message'],
df['label'],
random_state=1)
print('Number of rows in the total set: {}'.format(df.shape[0]))
print('Number of rows in the training set: {}'.format(X_train.shape[0]))
print('Number of rows in the test set: {}'.format(X_test.shape[0]))_____no_output_____
</code>
### Step 3.2: Applying Bag of Words processing to our dataset. ###
Now that we have split the data, our next objective is to follow the steps from "Step 2: Bag of Words," and convert our data into the desired matrix format. To do this we will be using CountVectorizer() as we did before. There are two steps to consider here:
* First, we have to fit our training data (`X_train`) into `CountVectorizer()` and return the matrix.
* Secondly, we have to transform our testing data (`X_test`) to return the matrix.
Note that `X_train` is our training data for the 'sms_message' column in our dataset and we will be using this to train our model.
`X_test` is our testing data for the 'sms_message' column and this is the data we will be using (after transformation to a matrix) to make predictions on. We will then compare those predictions with `y_test` in a later step.
For now, we have provided the code that does the matrix transformations for you!_____no_output_____
<code>
'''
[Practice Node]
The code for this segment is in 2 parts. First, we are learning a vocabulary dictionary for the training data
and then transforming the data into a document-term matrix; secondly, for the testing data we are only
transforming the data into a document-term matrix.
This is similar to the process we followed in Step 2.3.
We will provide the transformed data to students in the variables 'training_data' and 'testing_data'.
'''_____no_output_____'''
Solution
'''
# Instantiate the CountVectorizer method
count_vector = CountVectorizer()
# Fit the training data and then return the matrix
training_data = count_vector.fit_transform(X_train)
# Transform testing data and return the matrix. Note we are not fitting the testing data into the CountVectorizer()
testing_data = count_vector.transform(X_test)_____no_output_____
</code>
### Step 4.1: Bayes Theorem implementation from scratch ###
Now that we have our dataset in the format that we need, we can move onto the next portion of our mission which is the algorithm we will use to make our predictions to classify a message as spam or not spam. Remember that at the start of the mission we briefly discussed the Bayes theorem but now we shall go into a little more detail. In layman's terms, the Bayes theorem calculates the probability of an event occurring, based on certain other probabilities that are related to the event in question. It is composed of "prior probabilities" - or just "priors." These "priors" are the probabilities that we are aware of, or that are given to us. And Bayes theorem is also composed of the "posterior probabilities," or just "posteriors," which are the probabilities we are looking to compute using the "priors".
Let us implement the Bayes Theorem from scratch using a simple example. Let's say we are trying to find the odds of an individual having diabetes, given that he or she was tested for it and got a positive result.
In the medical field, such probabilities play a very important role as they often deal with life and death situations.
We assume the following:
`P(D)` is the probability of a person having Diabetes. Its value is `0.01`, or in other words, 1% of the general population has diabetes (disclaimer: these values are assumptions and are not reflective of any actual medical study).
`P(Pos)` is the probability of getting a positive test result.
`P(Neg)` is the probability of getting a negative test result.
`P(Pos|D)` is the probability of getting a positive result on a test done for detecting diabetes, given that you have diabetes. This has a value of `0.9`; in other words, the test correctly identifies 90% of the people who do have diabetes. This is also called the Sensitivity or True Positive Rate.
`P(Neg|~D)` is the probability of getting a negative result on a test done for detecting diabetes, given that you do not have diabetes. This also has a value of `0.9`; in other words, the test correctly clears 90% of the people who do not have diabetes. This is also called the Specificity or True Negative Rate.
The Bayes formula is as follows:
<img src="images/bayes_formula.png" height="242" width="242">
* `P(A)` is the prior probability of A occurring independently. In our example this is `P(D)`. This value is given to us.
* `P(B)` is the prior probability of B occurring independently. In our example this is `P(Pos)`.
* `P(A|B)` is the posterior probability that A occurs given B. In our example this is `P(D|Pos)`. That is, **the probability of an individual having diabetes, given that this individual got a positive test result. This is the value that we are looking to calculate.**
* `P(B|A)` is the probability of B occurring given A (the likelihood). In our example this is `P(Pos|D)`. This value is given to us._____no_output_____Putting our values into the formula for Bayes theorem we get:
`P(D|Pos) = P(D) * P(Pos|D) / P(Pos)`
The probability of getting a positive test result `P(Pos)` can be calculated using the Sensitivity and Specificity as follows:
`P(Pos) = [P(D) * Sensitivity] + [P(~D) * (1 - Specificity)]`_____no_output_____
<code>
'''
Instructions:
Calculate probability of getting a positive test result, P(Pos)
'''_____no_output_____'''
Solution (skeleton code will be provided)
'''
# P(D)
p_diabetes = 0.01
# P(~D)
p_no_diabetes = 0.99
# Sensitivity or P(Pos|D)
p_pos_diabetes = 0.9
# Specificity or P(Neg|~D)
p_neg_no_diabetes = 0.9
# P(Pos)
p_pos = (p_diabetes * p_pos_diabetes) + (p_no_diabetes * (1 - p_neg_no_diabetes))
print('The probability of getting a positive test result P(Pos) is: {}'.format(p_pos))_____no_output_____
</code>
**Using all of this information we can calculate our posteriors as follows:**
The probability of an individual having diabetes, given that the individual got a positive test result:
`P(D|Pos) = (P(D) * Sensitivity) / P(Pos)`
The probability of an individual not having diabetes, given that the individual got a positive test result:
`P(~D|Pos) = (P(~D) * (1 - Specificity)) / P(Pos)`
The sum of our posteriors will always equal `1`. _____no_output_____
<code>
'''
Instructions:
Compute the probability of an individual having diabetes, given that that individual got a positive test result.
In other words, compute P(D|Pos).
The formula is: P(D|Pos) = (P(D) * P(Pos|D)) / P(Pos)
'''_____no_output_____'''
Solution
'''
# P(D|Pos)
p_diabetes_pos = (p_diabetes * p_pos_diabetes) / p_pos  # P(D|Pos) = P(D) * P(Pos|D) / P(Pos)
print('Probability of an individual having diabetes, given that that individual '
      'got a positive test result is: {}'.format(p_diabetes_pos)) _____no_output_____'''
Instructions:
Compute the probability of an individual not having diabetes, given that that individual got a positive test result.
In other words, compute P(~D|Pos).
The formula is: P(~D|Pos) = P(~D) * P(Pos|~D) / P(Pos)
Note that P(Pos|~D) can be computed as 1 - P(Neg|~D).
Therefore:
P(Pos|~D) = p_pos_no_diabetes = 1 - 0.9 = 0.1
'''_____no_output_____'''
Solution
'''
# P(Pos|~D)
p_pos_no_diabetes = 0.1
# P(~D|Pos)
p_no_diabetes_pos = (p_no_diabetes * p_pos_no_diabetes) / p_pos  # P(~D|Pos) = P(~D) * P(Pos|~D) / P(Pos)
print('Probability of an individual not having diabetes, given that that individual '
      'got a positive test result is: {}'.format(p_no_diabetes_pos))_____no_output_____
</code>
Congratulations! You have implemented Bayes Theorem from scratch. Your analysis shows that even if you get a positive test result, there is only an 8.33% chance that you actually have diabetes and a 91.67% chance that you do not have diabetes. This of course rests on our assumption that only 1% of the entire population has diabetes._____no_output_____**What does the term 'Naive' in 'Naive Bayes' mean?**
The term 'Naive' in Naive Bayes comes from the fact that the algorithm considers the features that it is using to make the predictions to be independent of each other, which may not always be the case. So in our Diabetes example, we are considering only one feature, that is the test result. Say we added another feature, 'exercise'. Let's say this feature has a binary value of `0` and `1`, where the former signifies that the individual exercises less than or equal to 2 days a week and the latter signifies that the individual exercises greater than or equal to 3 days a week. If we had to use both of these features, namely the test result and the value of the 'exercise' feature, to compute our final probabilities, plain Bayes' theorem alone would not tell us how to combine them, since it says nothing about their joint likelihood. Naive Bayes is an extension of Bayes' theorem that sidesteps this by assuming that all the features are independent of each other. _____no_output_____### Step 4.2: Naive Bayes implementation from scratch ###
_____no_output_____Now that you have understood the ins and outs of Bayes Theorem, we will extend it to consider cases where we have more than one feature.
Let's say that we have two political parties' candidates, 'Jill Stein' of the Green Party and 'Gary Johnson' of the Libertarian Party and we have the probabilities of each of these candidates saying the words 'freedom', 'immigration' and 'environment' when they give a speech:
* Probability that Jill Stein says 'freedom': 0.1 ---------> `P(F|J)`
* Probability that Jill Stein says 'immigration': 0.1 -----> `P(I|J)`
* Probability that Jill Stein says 'environment': 0.8 -----> `P(E|J)`
* Probability that Gary Johnson says 'freedom': 0.7 -------> `P(F|G)`
* Probability that Gary Johnson says 'immigration': 0.2 ---> `P(I|G)`
* Probability that Gary Johnson says 'environment': 0.1 ---> `P(E|G)`
And let us also assume that the probability of Jill Stein giving a speech, `P(J)` is `0.5` and the same for Gary Johnson, `P(G) = 0.5`.
Given this, what if we had to find the probabilities of Jill Stein saying the words 'freedom' and 'immigration'? This is where the Naive Bayes' theorem comes into play as we are considering two features, 'freedom' and 'immigration'.
Now we are at a place where we can define the formula for the Naive Bayes' theorem:
<img src="images/naivebayes.png" height="342" width="342">
Here, `y` is the class variable (in our case the name of the candidate) and `x1` through `xn` are the feature vectors (in our case the individual words). The theorem makes the assumption that each of the feature vectors or words (`xi`) are independent of each other._____no_output_____To break this down, we have to compute the following posterior probabilities:
* `P(J|F,I)`: Given the words 'freedom' and 'immigration' were said, what's the probability they were said by Jill?
Using the formula and our knowledge of Bayes' theorem, we can compute this as follows: `P(J|F,I)` = `(P(J) * P(F|J) * P(I|J)) / P(F,I)`. Here `P(F,I)` is the probability of the words 'freedom' and 'immigration' being said in a speech.
* `P(G|F,I)`: Given the words 'freedom' and 'immigration' were said, what's the probability they were said by Gary?
Using the formula, we can compute this as follows: `P(G|F,I)` = `(P(G) * P(F|G) * P(I|G)) / P(F,I)`_____no_output_____
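This factorization is easy to wrap in a small helper function. The sketch below is our own illustration (the function name `naive_bayes_posteriors` and its arguments are not part of the original notebook); it multiplies each candidate's prior by the likelihoods of the observed words and then normalizes by the evidence term `P(F,I)`:_____no_output_____
<code>
def naive_bayes_posteriors(priors, likelihoods):
    """Naive Bayes: P(y|x1..xn) is proportional to P(y) * P(x1|y) * ... * P(xn|y)."""
    unnormalized = {}
    for label, prior in priors.items():
        score = prior
        for p in likelihoods[label]:  # independence assumption: multiply per-feature likelihoods
            score *= p
        unnormalized[label] = score
    evidence = sum(unnormalized.values())  # P(x1,...,xn), e.g. P(F,I)
    return {label: score / evidence for label, score in unnormalized.items()}

# The speech example from above, with the features 'freedom' and 'immigration'
print(naive_bayes_posteriors(priors={'Jill': 0.5, 'Gary': 0.5},
                             likelihoods={'Jill': [0.1, 0.1], 'Gary': [0.7, 0.2]}))_____no_output_____
</code>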
<code>
'''
Instructions: Compute the probability of the words 'freedom' and 'immigration' being said in a speech, or
P(F,I).
The first step is multiplying the probabilities of Jill Stein giving a speech with her individual
probabilities of saying the words 'freedom' and 'immigration'. Store this in a variable called p_j_text.
The second step is multiplying the probabilities of Gary Johnson giving a speech with his individual
probabilities of saying the words 'freedom' and 'immigration'. Store this in a variable called p_g_text.
The third step is to add both of these probabilities and you will get P(F,I).
'''_____no_output_____'''
Solution: Step 1
'''
# P(J)
p_j = 0.5
# P(F/J)
p_j_f = 0.1
# P(I/J)
p_j_i = 0.1
p_j_text = p_j * p_j_f * p_j_i  # P(J) * P(F|J) * P(I|J)
print(p_j_text)_____no_output_____'''
Solution: Step 2
'''
# P(G)
p_g = 0.5
# P(F/G)
p_g_f = 0.7
# P(I/G)
p_g_i = 0.2
p_g_text = p_g * p_g_f * p_g_i  # P(G) * P(F|G) * P(I|G)
print(p_g_text)_____no_output_____'''
Solution: Step 3: Compute P(F,I) and store in p_f_i
'''
p_f_i = p_j_text + p_g_text  # P(F,I)
print('Probability of words freedom and immigration being said are: ', format(p_f_i))_____no_output_____
</code>
Now we can compute the probability of `P(J|F,I)`, the probability of Jill Stein saying the words 'freedom' and 'immigration' and `P(G|F,I)`, the probability of Gary Johnson saying the words 'freedom' and 'immigration'._____no_output_____
<code>
'''
Instructions:
Compute P(J|F,I) using the formula P(J|F,I) = (P(J) * P(F|J) * P(I|J)) / P(F,I) and store it in a variable p_j_fi
'''_____no_output_____'''
Solution
'''
p_j_fi = (p_j * p_j_f * p_j_i) / p_f_i  # equivalently p_j_text / p_f_i
print('The probability of Jill Stein saying the words Freedom and Immigration: ', format(p_j_fi))_____no_output_____'''
Instructions:
Compute P(G|F,I) using the formula P(G|F,I) = (P(G) * P(F|G) * P(I|G)) / P(F,I) and store it in a variable p_g_fi
'''_____no_output_____'''
Solution
'''
p_g_fi = (p_g * p_g_f * p_g_i) / p_f_i  # equivalently p_g_text / p_f_i
print('The probability of Gary Johnson saying the words Freedom and Immigration: ', format(p_g_fi))_____no_output_____
</code>
And as we can see, just like in the Bayes' theorem case, the sum of our posteriors is equal to 1.
Congratulations! You have implemented the Naive Bayes' theorem from scratch. Our analysis shows that there is only a 6.7% chance that Jill Stein of the Green Party uses the words 'freedom' and 'immigration' in her speech as compared with the 93.3% chance for Gary Johnson of the Libertarian party._____no_output_____For another example of Naive Bayes, let's consider searching for images using the term 'Sacramento Kings' in a search engine. In order for us to get the results pertaining to the Sacramento Kings NBA basketball team, the search engine needs to be able to associate the two words together and not treat them individually. If the search engine only searched for the words individually, we would get results of images tagged with 'Sacramento,' like pictures of city landscapes, and images of 'Kings,' which might be pictures of crowns or kings from history. But associating the two terms together would produce images of the basketball team. In the first approach we would treat the words as independent entities, so it would be considered 'naive.' We don't usually want this approach from a search engine, but it can be extremely useful in other cases.
Applying this to our problem of classifying messages as spam, the Naive Bayes algorithm *looks at each word individually and not as associated entities* with any kind of link between them. In the case of spam detectors, this usually works, as there are certain red flag words in an email which are highly reliable in classifying it as spam. For example, emails with words like 'viagra' are usually classified as spam._____no_output_____### Step 5: Naive Bayes implementation using scikit-learn ###
Now let's return to our spam classification context. Thankfully, sklearn has several Naive Bayes implementations that we can use, so we do not have to do the math from scratch. We will be using sklearn's `sklearn.naive_bayes` module to make predictions on our SMS messages dataset.
Specifically, we will be using the multinomial Naive Bayes algorithm. This particular classifier is suitable for classification with discrete features (such as in our case, word counts for text classification). It takes in integer word counts as its input. On the other hand, Gaussian Naive Bayes is better suited for continuous data as it assumes that the input data has a Gaussian (normal) distribution._____no_output_____
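To make the "integer word counts" point concrete, here is a tiny toy example of our own (the counts and labels below are made up purely for illustration and are unrelated to our SMS dataset):_____no_output_____
<code>
from sklearn.naive_bayes import MultinomialNB

# Toy document-term count matrix: each row is a document, each column a word count
toy_counts = [[2, 1, 0],   # e.g. counts of the words ['free', 'win', 'meeting']
              [3, 0, 0],
              [0, 0, 2],
              [0, 1, 3]]
toy_labels = [1, 1, 0, 0]  # 1 = spam, 0 = not spam

toy_clf = MultinomialNB()
toy_clf.fit(toy_counts, toy_labels)
print(toy_clf.predict([[1, 2, 0]]))  # predict the label of a new count vector_____no_output_____
</code>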
<code>
'''
Instructions:
We have loaded the training data into the variable 'training_data' and the testing data into the
variable 'testing_data'.
Import the MultinomialNB classifier and fit the training data into the classifier using fit(). Name your classifier
'naive_bayes'. You will be training the classifier using 'training_data' and 'y_train' from our split earlier.
'''_____no_output_____'''
Solution
'''
from sklearn.naive_bayes import MultinomialNB
naive_bayes = MultinomialNB()
naive_bayes.fit(training_data, y_train)_____no_output_____'''
Instructions:
Now that our algorithm has been trained using the training data set we can now make some predictions on the test data
stored in 'testing_data' using predict(). Save your predictions into the 'predictions' variable.
'''_____no_output_____'''
Solution
'''
predictions = naive_bayes.predict(testing_data)_____no_output_____
</code>
Now that predictions have been made on our test set, we need to check the accuracy of our predictions._____no_output_____### Step 6: Evaluating our model ###
Now that we have made predictions on our test set, our next goal is to evaluate how well our model is doing. There are various mechanisms for doing so, so first let's review them.
**Accuracy** measures how often the classifier makes the correct prediction. It’s the ratio of the number of correct predictions to the total number of predictions (the number of test data points).
**Precision** tells us what proportion of messages we classified as spam, actually were spam.
It is a ratio of true positives (messages classified as spam, and which actually are spam) to all positives (all messages classified as spam, regardless of whether that was the correct classification). In other words, precision is the ratio of
`[True Positives/(True Positives + False Positives)]`
**Recall (sensitivity)** tells us what proportion of messages that actually were spam were classified by us as spam.
It is a ratio of true positives (messages classified as spam, and which actually are spam) to all the messages that were actually spam. In other words, recall is the ratio of
`[True Positives/(True Positives + False Negatives)]`
For classification problems that are skewed in their classification distributions like in our case - for example if we had 100 text messages and only 2 were spam and the other 98 weren't - accuracy by itself is not a very good metric. We could classify 90 messages as not spam (including the 2 that were spam but we classify them as not spam, hence they would be false negatives) and 10 as spam (all 10 false positives) and still get a reasonably good accuracy score. For such cases, precision and recall come in very handy. These two metrics can be combined to get the **F1 score**, which is the weighted average of the precision and recall scores. This score can range from 0 to 1, with 1 being the best possible F1 score._____no_output_____We will be using all 4 of these metrics to make sure our model does well. For all 4 metrics whose values can range from 0 to 1, having a score as close to 1 as possible is a good indicator of how well our model is doing._____no_output_____
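Before computing these scores on our real predictions, here is a small sketch of our own that recreates the skewed toy scenario described above (100 messages, only 2 of them spam) and shows how accuracy can look fine while precision, recall and F1 collapse:_____no_output_____
<code>
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score

# 100 toy messages: 2 spam (1) and 98 not spam (0)
y_true_toy = [1, 1] + [0] * 98
# A bad classifier: misses both spam messages and flags 10 innocent messages as spam
y_pred_toy = [0, 0] + [1] * 10 + [0] * 88

print('Accuracy: ', accuracy_score(y_true_toy, y_pred_toy))                    # 0.88, still looks decent
print('Precision:', precision_score(y_true_toy, y_pred_toy, zero_division=0))  # 0.0, none of the flagged messages were spam
print('Recall:   ', recall_score(y_true_toy, y_pred_toy, zero_division=0))     # 0.0, none of the real spam was caught
print('F1:       ', f1_score(y_true_toy, y_pred_toy, zero_division=0))         # 0.0_____no_output_____
</code>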
<code>
'''
Instructions:
Compute the accuracy, precision, recall and F1 scores of your model using your test data 'y_test' and the predictions
you made earlier stored in the 'predictions' variable.
'''_____no_output_____'''
Solution
'''
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
print('Accuracy score: ', format(accuracy_score(y_test, predictions)))
print('Precision score: ', format(precision_score(y_test, predictions)))
print('Recall score: ', format(recall_score(y_test, predictions)))
print('F1 score: ', format(f1_score(y_test, predictions)))_____no_output_____
</code>
### Step 7: Conclusion ###
One of the major advantages that Naive Bayes has over other classification algorithms is its ability to handle an extremely large number of features. In our case, each word is treated as a feature and there are thousands of different words. Also, it performs well even in the presence of irrelevant features and is relatively unaffected by them. The other major advantage it has is its relative simplicity. Naive Bayes works well right out of the box, and tuning its parameters is rarely necessary, except in cases where the distribution of the data is known.
It rarely overfits the data. Another important advantage is that its model training and prediction times are very fast for the amount of data it can handle. All in all, Naive Bayes really is a gem of an algorithm!
Congratulations! You have successfully designed a model that can efficiently predict if an SMS message is spam or not!
Thank you for learning with us!_____no_output_____
| {
"repository": "psnx/artificial-intelligence",
"path": "Exercises/4_Bayesian_Interference/Bayesian_Inference.ipynb",
"matched_keywords": [
"STAR",
"Salmon"
],
"stars": null,
"size": 617289,
"hexsha": "d09a641ff90ef8c35714592b5a325c544ede0c90",
"max_line_length": 928,
"avg_line_length": 87.3975647742,
"alphanum_fraction": 0.672004523
} |
# Notebook from ben-heil/saged
Path: notebook/debugging/data_similarity.ipynb
# Data Similarity
Previous experiments have had some strange results, with models occasionally performing abnormally well (or badly) on the out of sample set. To make sure that there are no duplicate samples or abnormally similar studies, I made this notebook_____no_output_____
<code>
import json
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import yaml
from plotnine import *
from sklearn.metrics.pairwise import euclidean_distances
from saged import utils, datasets, models_____no_output_____
</code>
## Load the data_____no_output_____
<code>
dataset_config_file = '../../dataset_configs/refinebio_labeled_dataset.yml'_____no_output_____dataset_config_str = """name: "RefineBioMixedDataset"
compendium_path: "../../data/subset_compendium.pkl"
metadata_path: "../../data/aggregated_metadata.json"
label_path: "../../data/sample_classifications.pkl"
"""
dataset_config = yaml.safe_load(dataset_config_str)
dataset_name = dataset_config.pop('name')
MixedDatasetClass = datasets.RefineBioMixedDataset
all_data = MixedDatasetClass.from_config(**dataset_config) _____no_output_____
</code>
## Look for samples that are very similar to each other despite having different IDs_____no_output_____
<code>
sample_names = all_data.get_samples()
assert len(sample_names) == len(set(sample_names))
sample_names[:5]_____no_output_____expression = all_data.get_all_data()
print(len(sample_names))
print(expression.shape)10234
(10234, 14634)
sample_distance_matrix = euclidean_distances(expression, expression)
# This is unrelated to debugging the data, I'm just curious
gene_distance_matrix = euclidean_distances(expression.T, expression.T)_____no_output_____sample_distance_matrix.shape_____no_output_____sample_distance_matrix_____no_output_____# See if there are any zero distances outside the diagonal
num_zeros = 10234 * 10234 - np.count_nonzero(sample_distance_matrix)
num_zeros_____no_output_____
</code>
Since there are as many zeros as elements in the diagonal, there are no duplicate samples with different IDs (unless noise was added somewhere)_____no_output_____### Get all distances
Because we know there aren't any zeros outside of the diagonal, we can zero out the lower diagonal and use the non-zero entries of the upper diagonal to visualize the distance distribution_____no_output_____
<code>
triangle = np.triu(sample_distance_matrix, k=0)
triangle_____no_output_____distances = triangle.flatten()
nonzero_distances = distances[distances != 0]
nonzero_distances.shape_____no_output_____plt.hist(nonzero_distances, bins=20)_____no_output_____
</code>
Distribution looks bimodal, probably due to different platforms having different distances from each other?_____no_output_____
<code>
plt.hist(nonzero_distances[nonzero_distances < 200])_____no_output_____plt.hist(nonzero_distances[nonzero_distances < 100])_____no_output_____
</code>
Looks like there may be some samples that are abnormally close to each other. I wonder whether they're in the same study_____no_output_____## Correspondence between distance and study_____no_output_____
<code>
# There is almost certainly a vectorized way of doing this but oh well
distances = []
first_samples = []
second_samples = []
for row_index in range(sample_distance_matrix.shape[0]):
for col_index in range(sample_distance_matrix.shape[0]):
distance = sample_distance_matrix[row_index, col_index]
if distance == 0:
continue
distances.append(distance)
first_samples.append(sample_names[row_index])
second_samples.append(sample_names[col_index])_____no_output_____distance_df = pd.DataFrame({'distance': distances, 'sample_1': first_samples,
'sample_2': second_samples})_____no_output_____# Free up memory to prevent swapping (probably hopeless if the user has < 32GB)
del(triangle)
del(sample_distance_matrix)
del(distances)
del(first_samples)
del(second_samples)
del(nonzero_distances)_____no_output_____distance_df_____no_output_____sample_to_study = all_data.sample_to_study_____no_output_____del(all_data)_____no_output_____distance_df['study_1'] = distance_df['sample_1'].map(sample_to_study)
distance_df['study_2'] = distance_df['sample_2'].map(sample_to_study)
distance_df['same_study'] = distance_df['study_1'] == distance_df['study_2']_____no_output_____distance_df.head()_____no_output_____print(len(distance_df))104723274
</code>
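The nested loop above is straightforward but slow. A vectorized sketch of our own (it would have to run before the `del` calls above, and it lists each unordered pair once rather than twice) could build the same table directly from the upper-triangle indices:_____no_output_____
<code>
# Vectorized alternative to the nested loop (a sketch, not the code used for the results below)
row_idx, col_idx = np.triu_indices(sample_distance_matrix.shape[0], k=1)
pair_df = pd.DataFrame({'distance': sample_distance_matrix[row_idx, col_idx],
                        'sample_1': np.asarray(sample_names)[row_idx],
                        'sample_2': np.asarray(sample_names)[col_idx]})_____no_output_____
</code>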
For some reason my computer didn't want me to make a figure with 50 million points. We'll work with means instead_____no_output_____
<code>
means_df = distance_df.groupby(['study_1', 'same_study']).mean()_____no_output_____means_df_____no_output_____means_df = means_df.unstack(level='same_study')
means_df = means_df.reset_index()
means_df.head()_____no_output_____# Get rid of the multilevel confusion
means_df.columns = means_df.columns.droplevel()
means_df.columns = ['study_name', 'distance_to_other', 'distance_to_same']
means_df['difference'] = means_df['distance_to_other'] - means_df['distance_to_same']
means_df.head()_____no_output_____plot = ggplot(means_df, aes(x='study_name', y='difference'))
plot += geom_point()
plot += ylab('out of study - in-study mean')
plot_____no_output_____means_df.sort_values(by='difference')_____no_output_____
</code>
These results indicate that most of the data is behaving as expected (the distance between pairs of samples from different studies is less than the distance between pairs of samples within the same study).
The outliers are mostly bead-chip, which makes sense (though they shouldn't be in the dataset and I'll need to look more closely at that later). The one exception is SRP049820 which is run on an Illumina Genome Analyzer II. Maybe it's due to the old tech?_____no_output_____## Without BE Correction_____no_output_____
<code>
%reset -f
# Calling reset because the notebook runs out of memory otherwise
import json
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import yaml
from plotnine import *
from sklearn.metrics.pairwise import euclidean_distances
from saged import utils, datasets, models_____no_output_____dataset_config_file = '../../dataset_configs/refinebio_labeled_dataset.yml'
dataset_config_str = """name: "RefineBioMixedDataset"
compendium_path: "../../data/subset_compendium.pkl"
metadata_path: "../../data/aggregated_metadata.json"
label_path: "../../data/sample_classifications.pkl"
"""
dataset_config = yaml.safe_load(dataset_config_str)
dataset_name = dataset_config.pop('name')
MixedDatasetClass = datasets.RefineBioMixedDataset
all_data = MixedDatasetClass.from_config(**dataset_config) _____no_output_____# Correct for batch effects
all_data = datasets.correct_batch_effects(all_data, 'limma')_____no_output_____
</code>
## Look for samples that are very similar to each other despite having different IDs_____no_output_____
<code>
sample_names = all_data.get_samples()
assert len(sample_names) == len(set(sample_names))
sample_names[:5]_____no_output_____expression = all_data.get_all_data()
print(len(sample_names))
print(expression.shape)10234
(10234, 14634)
sample_distance_matrix = euclidean_distances(expression, expression)
# This is unrelated to debugging the data, I'm just curious
gene_distance_matrix = euclidean_distances(expression.T, expression.T)_____no_output_____sample_distance_matrix.shape_____no_output_____sample_distance_matrix_____no_output_____# See if there are any zero distances outside the diagonal
num_zeros = 10234 * 10234 - np.count_nonzero(sample_distance_matrix)
num_zeros_____no_output_____
</code>
Since there are as many zeros as elements in the diagonal, there are no duplicate samples with different IDs (unless noise was added somewhere)_____no_output_____### Get all distances
Because we know there aren't any zeros outside of the diagonal, we can zero out the lower diagonal and use the non-zero entries of the upper diagonal to visualize the distance distribution_____no_output_____
<code>
triangle = np.triu(sample_distance_matrix, k=0)
triangle_____no_output_____distances = triangle.flatten()
nonzero_distances = distances[distances != 0]
nonzero_distances.shape_____no_output_____plt.hist(nonzero_distances, bins=20)_____no_output_____
</code>
Distribution looks bimodal, probably due to different platforms having different distances from each other?_____no_output_____
<code>
plt.hist(nonzero_distances[nonzero_distances < 200])_____no_output_____plt.hist(nonzero_distances[nonzero_distances < 100])_____no_output_____
</code>
Looks like there may be some samples that are abnormally close to each other. I wonder whether they're in the same study_____no_output_____## Correspondence between distance and study_____no_output_____
<code>
# There is almost certainly a vectorized way of doing this but oh well
distances = []
first_samples = []
second_samples = []
for row_index in range(sample_distance_matrix.shape[0]):
for col_index in range(sample_distance_matrix.shape[0]):
distance = sample_distance_matrix[row_index, col_index]
if distance == 0:
continue
distances.append(distance)
first_samples.append(sample_names[row_index])
second_samples.append(sample_names[col_index])_____no_output_____distance_df = pd.DataFrame({'distance': distances, 'sample_1': first_samples,
'sample_2': second_samples})_____no_output_____# Free up memory to prevent swapping (probably hopeless if the user has < 32GB)
del(triangle)
del(sample_distance_matrix)
del(distances)
del(first_samples)
del(second_samples)
del(nonzero_distances)_____no_output_____distance_df_____no_output_____sample_to_study = all_data.sample_to_study_____no_output_____del(all_data)_____no_output_____distance_df['study_1'] = distance_df['sample_1'].map(sample_to_study)
distance_df['study_2'] = distance_df['sample_2'].map(sample_to_study)
distance_df['same_study'] = distance_df['study_1'] == distance_df['study_2']_____no_output_____distance_df.head()_____no_output_____print(len(distance_df))104724522
</code>
For some reason my computer didn't want me to make a figure with 50 million points. We'll work with means instead_____no_output_____
<code>
means_df = distance_df.groupby(['study_1', 'same_study']).mean()_____no_output_____means_df_____no_output_____means_df = means_df.unstack(level='same_study')
means_df = means_df.reset_index()
means_df.head()_____no_output_____# Get rid of the multilevel confusion
means_df.columns = means_df.columns.droplevel()
means_df.columns = ['study_name', 'distance_to_other', 'distance_to_same']
means_df['difference'] = means_df['distance_to_other'] - means_df['distance_to_same']
means_df.head()_____no_output_____plot = ggplot(means_df, aes(x='study_name', y='difference'))
plot += geom_point()
plot += ylab('out of study - in-study mean')
plot_____no_output_____means_df.sort_values(by='difference')_____no_output_____
</code>
These results indicate that most of the data is behaving as expected (the distance between pairs of samples from different studies is less than the distance between pairs of samples within the same study).
The outliers are mostly bead-chip, which makes sense (though they shouldn't be in the dataset and I'll need to look more closely at that later). The one exception is SRP049820 which is run on an Illumina Genome Analyzer II. Maybe it's due to the old tech?_____no_output_____
| {
"repository": "ben-heil/saged",
"path": "notebook/debugging/data_similarity.ipynb",
"matched_keywords": [
"limma"
],
"stars": 3,
"size": 200227,
"hexsha": "d09b13c518f7a63ad2347538123bef465e2b1b4e",
"max_line_length": 44324,
"avg_line_length": 79.8035073735,
"alphanum_fraction": 0.7679383899
} |
# Notebook from smoe/SSUsearch
Path: notebooks/ssu-search-Copy4.ipynb
###Set up working directory_____no_output_____
<code>
cd /usr/local/notebooks/usr/local/notebooks
mkdir -p ./workdir_____no_output_____#check seqfile files to process in data directory (make sure you still remember the data directory)
!ls ./data/test/data1c.fa 1d.fa 2c.fa 2d.fa
</code>
#README
## This part of the pipeline searches for SSU rRNA gene fragments, classifies them, and extracts reads aligned to a specific region. It is also the heavy-lifting part of the whole pipeline (more CPUs will help).
## This part works with one seqfile at a time. You just need to change the "Seqfile" and maybe other parameters in the two cells below.
## To run commands, click "Cell" then "Run All". After it finishes, you will see "\*** pipeline runs successsfully :)" at the bottom of this page.
##If your computer has many processors, there are two ways to make use of the resource:
1. Set "Cpu" to a higher number.
2. Make more copies of this notebook (click "File" then "Make a copy" in the menu bar), so you can run this step on multiple files at the same time.
(Again we assume the "Seqfile" is quality trimmed.)
###Here we will process one file at a time; set the "Seqfile" variable to the seqfile name to be processed
###The first part of the seqfile basename (separated by ".") will be the label of this sample, so name it properly.
e.g. for "/usr/local/notebooks/data/test/data/1c.fa", "1c" will be the label of this sample._____no_output_____
<code>
Seqfile='./data/test/data/2d.fa'_____no_output_____
</code>
###Other parameters to set_____no_output_____
<code>
Cpu='2' # number of maxixum threads for search and alignment
Hmm='./data/SSUsearch_db/Hmm.ssu.hmm' # hmm model for ssu
Gene='ssu'
Script_dir='./SSUsearch/scripts'
Gene_model_org='./data/SSUsearch_db/Gene_model_org.16s_ecoli_J01695.fasta'
Ali_template='./data/SSUsearch_db/Ali_template.silva_ssu.fasta'
Start='577' #pick regions for de novo clustering
End='727'
Len_cutoff='100' # min length for reads picked for the region
Gene_tax='./data/SSUsearch_db/Gene_tax.silva_taxa_family.tax' # silva 108 ref
Gene_db='./data/SSUsearch_db/Gene_db.silva_108_rep_set.fasta'
Gene_tax_cc='./data/SSUsearch_db/Gene_tax_cc.greengene_97_otus.tax' # greengene 2012.10 ref for copy correction
Gene_db_cc='./data/SSUsearch_db/Gene_db_cc.greengene_97_otus.fasta'_____no_output_____# first part of file basename will the label of this sample
import os
Filename=os.path.basename(Seqfile)
Tag=Filename.split('.')[0]_____no_output_____import os
Hmm=os.path.abspath(Hmm)
Seqfile=os.path.abspath(Seqfile)
Script_dir=os.path.abspath(Script_dir)
Gene_model_org=os.path.abspath(Gene_model_org)
Ali_template=os.path.abspath(Ali_template)
Gene_tax=os.path.abspath(Gene_tax)
Gene_db=os.path.abspath(Gene_db)
Gene_tax_cc=os.path.abspath(Gene_tax_cc)
Gene_db_cc=os.path.abspath(Gene_db_cc)
os.environ.update(
{'Cpu':Cpu,
'Hmm':os.path.abspath(Hmm),
'Gene':Gene,
'Seqfile':os.path.abspath(Seqfile),
'Filename':Filename,
'Tag':Tag,
'Script_dir':os.path.abspath(Script_dir),
'Gene_model_org':os.path.abspath(Gene_model_org),
'Ali_template':os.path.abspath(Ali_template),
'Start':Start,
'End':End,
'Len_cutoff':Len_cutoff,
'Gene_tax':os.path.abspath(Gene_tax),
'Gene_db':os.path.abspath(Gene_db),
'Gene_tax_cc':os.path.abspath(Gene_tax_cc),
'Gene_db_cc':os.path.abspath(Gene_db_cc)})_____no_output_____!echo "*** make sure: parameters are right"
!echo "Seqfile: $Seqfile\nCpu: $Cpu\nFilename: $Filename\nTag: $Tag"*** make sure: parameters are right
Seqfile: /usr/local/notebooks/data/test/data/1c.fa
Cpu: 2
Filename: 1c.fa
Tag: 1c
cd workdir/usr/local/notebooks/workdir
mkdir -p $Tag.ssu.out_____no_output_____### start hmmsearch_____no_output_____!echo "*** hmmsearch starting"
!time hmmsearch --incE 10 --incdomE 10 --cpu $Cpu \
--domtblout $Tag.ssu.out/$Tag.qc.$Gene.hmmdomtblout \
-o /dev/null -A $Tag.ssu.out/$Tag.qc.$Gene.sto \
$Hmm $Seqfile
!echo "*** hmmsearch finished"*** hmmsearch starting
0.95user 0.04system 0:00.99elapsed 99%CPU (0avgtext+0avgdata 65712maxresident)k
0inputs+1080outputs (0major+7774minor)pagefaults 0swaps
*** hmmsearch finished
!python $Script_dir/get-seq-from-hmmout.py \
$Tag.ssu.out/$Tag.qc.$Gene.hmmdomtblout \
$Tag.ssu.out/$Tag.qc.$Gene.sto \
$Tag.ssu.out/$Tag.qc.$Geneparsing hmmdotblout done..
50 of 114 seqs are kept after hmm parser
</code>
### Pass hits to mothur aligner_____no_output_____
<code>
!echo "*** Starting mothur align"
!cat $Gene_model_org $Tag.ssu.out/$Tag.qc.$Gene > $Tag.ssu.out/$Tag.qc.$Gene.RFadded
# mothur does not allow tab between its flags, thus no indents here
!time mothur "#align.seqs(candidate=$Tag.ssu.out/$Tag.qc.$Gene.RFadded, template=$Ali_template, threshold=0.5, flip=t, processors=$Cpu)"
!rm -f mothur.*.logfile*** Starting mothur align
[H[2J
mothur v.1.34.4
Last updated: 12/22/2014
by
Patrick D. Schloss
Department of Microbiology & Immunology
University of Michigan
[email protected]
http://www.mothur.org
When using, please cite:
Schloss, P.D., et al., Introducing mothur: Open-source, platform-independent, community-supported software for describing and comparing microbial communities. Appl Environ Microbiol, 2009. 75(23):7537-41.
Distributed under the GNU General Public License
Type 'help()' for information on the commands that are available
Type 'quit()' to exit program
mothur > align.seqs(candidate=1c.ssu.out/1c.qc.ssu.RFadded, template=/usr/local/notebooks/data/SSUsearch_db/Ali_template.silva_ssu.fasta, threshold=0.5, flip=t, processors=2)
Using 2 processors.
Reading in the /usr/local/notebooks/data/SSUsearch_db/Ali_template.silva_ssu.fasta template sequences... DONE.
It took 25 to read 18491 sequences.
Aligning sequences from 1c.ssu.out/1c.qc.ssu.RFadded ...
23
28
It took 1 secs to align 51 sequences.
Output File Names:
1c.ssu.out/1c.qc.ssu.align
1c.ssu.out/1c.qc.ssu.align.report
[WARNING]: your sequence names contained ':'. I changed them to '_' to avoid problems in your downstream analysis.
mothur > quit()
26.96user 2.61system 0:29.14elapsed 101%CPU (0avgtext+0avgdata 4881984maxresident)k
0inputs+7792outputs (0major+399013minor)pagefaults 0swaps
</code>
### Get aligned seqs that have > 50% matched to references_____no_output_____
<code>
!python $Script_dir/mothur-align-report-parser-cutoff.py \
$Tag.ssu.out/$Tag.qc.$Gene.align.report \
$Tag.ssu.out/$Tag.qc.$Gene.align \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter \
0.5
0 bad seqs out of 51 total are removed from alignment
!python $Script_dir/remove-gap.py $Tag.ssu.out/$Tag.qc.$Gene.align.filter $Tag.ssu.out/$Tag.qc.$Gene.align.filter.fa_____no_output_____
</code>
### Search is done here (the computationally intensive part). Hooray!
- \$Tag.ssu.out/\$Tag.qc.\$Gene.align.filter:
aligned SSU rRNA gene fragments
- \$Tag.ssu.out/\$Tag.qc.\$Gene.align.filter.fa:
unaligned SSU rRNA gene fragments
_____no_output_____### Extract the reads mapped to the 150 bp region in V4 (positions 577-727 in the *E.coli* SSU rRNA gene) for unsupervised clustering_____no_output_____
<code>
!python $Script_dir/region-cut.py $Tag.ssu.out/$Tag.qc.$Gene.align.filter $Start $End $Len_cutoff
!mv $Tag.ssu.out/$Tag.qc.$Gene.align.filter."$Start"to"$End".cut.lenscreen $Tag.ssu.out/$Tag.forclust28 sequences are matched to 577-727 region
</code>
### Classify SSU rRNA gene seqs using SILVA_____no_output_____
<code>
!rm -f $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy
!mothur "#classify.seqs(fasta=$Tag.ssu.out/$Tag.qc.$Gene.align.filter.fa, template=$Gene_db, taxonomy=$Gene_tax, cutoff=50, processors=$Cpu)"
!mv $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.silva.taxonomy[H[2J
mothur v.1.34.4
Last updated: 12/22/2014
by
Patrick D. Schloss
Department of Microbiology & Immunology
University of Michigan
[email protected]
http://www.mothur.org
When using, please cite:
Schloss, P.D., et al., Introducing mothur: Open-source, platform-independent, community-supported software for describing and comparing microbial communities. Appl Environ Microbiol, 2009. 75(23):7537-41.
Distributed under the GNU General Public License
Type 'help()' for information on the commands that are available
Type 'quit()' to exit program
mothur > classify.seqs(fasta=1c.ssu.out/1c.qc.ssu.align.filter.fa, template=/usr/local/notebooks/data/SSUsearch_db/Gene_db.silva_108_rep_set.fasta, taxonomy=/usr/local/notebooks/data/SSUsearch_db/Gene_tax.silva_taxa_family.tax, cutoff=50, processors=2)
Using 2 processors.
Reading template taxonomy... DONE.
Reading template probabilities... DONE.
It took 20 seconds get probabilities.
Classifying sequences from 1c.ssu.out/1c.qc.ssu.align.filter.fa ...
Processing sequence: 25
Processing sequence: 25
It took 0 secs to classify 50 sequences.
It took 1 secs to create the summary file for 50 sequences.
Output File Names:
1c.ssu.out/1c.qc.ssu.align.filter.silva_taxa_family.wang.taxonomy
1c.ssu.out/1c.qc.ssu.align.filter.silva_taxa_family.wang.tax.summary
mothur > quit()
!python $Script_dir/count-taxon.py \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.silva.taxonomy \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.silva.taxonomy.count
!rm -f mothur.*.logfile_____no_output_____
</code>
### Classify SSU rRNA gene seqs with Greengene for copy correction later_____no_output_____
<code>
!rm -f $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy
!mothur "#classify.seqs(fasta=$Tag.ssu.out/$Tag.qc.$Gene.align.filter.fa, template=$Gene_db_cc, taxonomy=$Gene_tax_cc, cutoff=50, processors=$Cpu)"
!mv $Tag.ssu.out/$Tag.qc.$Gene.align.filter.*.wang.taxonomy \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.gg.taxonomy[H[2J
mothur v.1.34.4
Last updated: 12/22/2014
by
Patrick D. Schloss
Department of Microbiology & Immunology
University of Michigan
[email protected]
http://www.mothur.org
When using, please cite:
Schloss, P.D., et al., Introducing mothur: Open-source, platform-independent, community-supported software for describing and comparing microbial communities. Appl Environ Microbiol, 2009. 75(23):7537-41.
Distributed under the GNU General Public License
Type 'help()' for information on the commands that are available
Type 'quit()' to exit program
mothur > classify.seqs(fasta=1c.ssu.out/1c.qc.ssu.align.filter.fa, template=/usr/local/notebooks/data/SSUsearch_db/Gene_db_cc.greengene_97_otus.fasta, taxonomy=/usr/local/notebooks/data/SSUsearch_db/Gene_tax_cc.greengene_97_otus.tax, cutoff=50, processors=2)
Using 2 processors.
Reading template taxonomy... DONE.
Reading template probabilities... DONE.
It took 14 seconds get probabilities.
Classifying sequences from 1c.ssu.out/1c.qc.ssu.align.filter.fa ...
Processing sequence: 25
Processing sequence: 25
It took 1 secs to classify 50 sequences.
It took 0 secs to create the summary file for 50 sequences.
Output File Names:
1c.ssu.out/1c.qc.ssu.align.filter.greengene_97_otus.wang.taxonomy
1c.ssu.out/1c.qc.ssu.align.filter.greengene_97_otus.wang.tax.summary
mothur > quit()
!python $Script_dir/count-taxon.py \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.gg.taxonomy \
$Tag.ssu.out/$Tag.qc.$Gene.align.filter.wang.gg.taxonomy.count
!rm -f mothur.*.logfile_____no_output_____# check the output directory
!ls $Tag.ssu.out1c.577to727
1c.cut
1c.forclust
1c.qc.ssu
1c.qc.ssu.align
1c.qc.ssu.align.filter
1c.qc.ssu.align.filter.577to727.cut
1c.qc.ssu.align.filter.577to727.cut.lenscreen.fa
1c.qc.ssu.align.filter.fa
1c.qc.ssu.align.filter.greengene_97_otus.wang.tax.summary
1c.qc.ssu.align.filter.silva_taxa_family.wang.tax.summary
1c.qc.ssu.align.filter.wang.gg.taxonomy
1c.qc.ssu.align.filter.wang.gg.taxonomy.count
1c.qc.ssu.align.filter.wang.silva.taxonomy
1c.qc.ssu.align.filter.wang.silva.taxonomy.count
1c.qc.ssu.align.report
1c.qc.ssu.hmmdomtblout
1c.qc.ssu.hmmdomtblout.parsedToDictWithScore.pickle
1c.qc.ssu.hmmtblout
1c.qc.ssu.RFadded
1c.qc.ssu.sto
</code>
### This part of the pipeline (working with one sequence file) finishes here. Next we will combine samples for community analysis (see the unsupervised analysis).
The following files are useful for community analysis:
* 1c.577to727: aligned fasta file of seqs mapped to target region for de novo clustering
* 1c.qc.ssu.align.filter: aligned fasta file of all SSU rRNA gene fragments
* 1c.qc.ssu.align.filter.wang.gg.taxonomy: Greengene taxonomy (for copy correction)
* 1c.qc.ssu.align.filter.wang.silva.taxonomy: SILVA taxonomy_____no_output_____
<code>
!echo "*** pipeline runs successsfully :)"*** pipeline runs successsfully :)
</code>
| {
"repository": "smoe/SSUsearch",
"path": "notebooks/ssu-search-Copy4.ipynb",
"matched_keywords": [
"immunology"
],
"stars": 1,
"size": 21544,
"hexsha": "d09b407f0362b6a710e346fe44b06b4c23d77efe",
"max_line_length": 269,
"avg_line_length": 28.3847167325,
"alphanum_fraction": 0.5617805421
} |
# Notebook from cs-mac/Unsupervised_Style_Transfer
Path: evaluation/all_evaluation.ipynb
<code>
from google.colab import drive
drive.mount('/content/drive')
from google.colab import auth
auth.authenticate_user()
import gspread
from oauth2client.client import GoogleCredentials
gc = gspread.authorize(GoogleCredentials.get_application_default())Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
cd drive/"My Drive"/"Colab Notebooks"/master_project/evaluation/content/drive/My Drive/Colab Notebooks/master_project/evaluation
%%capture
!pip install krippendorff_____no_output_____import pandas as pd
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
import seaborn as sns
import pickle
import random
from statistics import mode, StatisticsError, mean, stdev
import krippendorff
import numpy as np
from sklearn.metrics import cohen_kappa_score
import copy
import csv
from collections import Counter
import sys
from sklearn.metrics import confusion_matrix
sys.path.append('..')
from utilities import */usr/local/lib/python3.6/dist-packages/statsmodels/tools/_testing.py:19: FutureWarning: pandas.util.testing is deprecated. Use the functions in the public API at pandas.testing instead.
import pandas.util.testing as tm
with open("../HAN/df_all.pkl", "rb") as handle:
df_all = pickle.load(handle)_____no_output_____def get_length_info(lst):
char_length = []
word_length = []
for item in lst:
char_length.append(len(item))
word_length.append(len(item.split()))
print(f"Avg. Length (char) = {round(mean(char_length), 2)} (SD={round(stdev(char_length), 2)})")
print(f"Avg. Length (word) = {round(mean(word_length), 2)} (SD={round(stdev(word_length), 2)})\n")_____no_output_____all_sentences = df_all.words
negative_sentences = df_all.words[df_all.categories==0]
positive_sentences = df_all.words[df_all.categories==1]
for lst in [all_sentences, negative_sentences, positive_sentences]:
get_length_info(lst)Avg. Length (char) = 78.06 (SD=26.78)
Avg. Length (word) = 15.62 (SD=5.08)
Avg. Length (char) = 78.35 (SD=26.98)
Avg. Length (word) = 15.65 (SD=5.09)
Avg. Length (char) = 77.77 (SD=26.57)
Avg. Length (word) = 15.58 (SD=5.07)
char_length = []
word_length = []
for item in df_all.words:
char_length.append(len(item))
word_length.append(len(item.split()))_____no_output_____char_random = random.sample(char_length, 25000)
char_random_y = [Counter(char_random)[i] for i in char_random]
word_random = random.sample(word_length, 25000)
word_random_y = [Counter(word_random)[i] for i in word_random]_____no_output_____plot = sns.barplot(x = char_random, y = char_random_y)
for ind, label in enumerate(plot.get_xticklabels()):
if ind % 10 == 0: # every 10th label is kept
label.set_visible(True)
else:
label.set_visible(False)
# new_ticks = [i.get_text() for i in plot.get_xticklabels()]
# plt.xticks(range(0, len(new_ticks), 20), new_ticks[::20])
plt.title('Length (Characters) Distribution of Sentences [25k]')
plt.xlabel("Length (Characters)")
plt.ylabel("Frequency")
plt.savefig("length_char_dist" + '.png', figsize = (16, 9), dpi=150, bbox_inches="tight")
plt.show()
plt.close()_____no_output_____plot = sns.barplot(x = word_random, y = word_random_y)
# for ind, label in enumerate(plot.get_xticklabels()):
# if ind % 10 == 0: # every 10th label is kept
# label.set_visible(True)
# else:
# label.set_visible(False)
plt.title('Length (words) Distribution of Sentences [25k]')
plt.xlabel("Length (words)")
plt.ylabel("Frequency")
plt.savefig("length_word_dist" + '.png', figsize = (16, 9), dpi=150, bbox_inches="tight")
plt.show()
plt.close()_____no_output_____with open("df_evaluation.pickle", "rb") as handle:
df_evaluation = pickle.load(handle)_____no_output_____original = df_evaluation["OG_sentiment"].to_list()
generated = df_evaluation["GEN_sentiment"].to_list()_____no_output_____count = 0
count_0_to_1_correct, count_0_to_1_total = 0, 0
count_1_to_0_correct, count_1_to_0_total = 0, 0
for og, gen in zip(original, generated):
if og == 0:
count_0_to_1_total += 1
else:
count_1_to_0_total += 1
if og != gen:
count += 1
if og == 0:
count_0_to_1_correct += 1
else:
count_1_to_0_correct += 1
print(f"accuracy [all] = {round((count/len(original))*100, 2)}%")
print(f"accuracy [0 -> 1] = {round((count_0_to_1_correct/count_0_to_1_total)*100, 2)}%")
print(f"accuracy [1 -> 0]= {round((count_1_to_0_correct/count_1_to_0_total)*100, 2)}%")accuracy [all] = 23.65%
accuracy [0 -> 1] = 17.09%
accuracy [1 -> 0]= 30.21%
from sklearn.metrics import classification_report
print(classification_report(original, generated)) precision recall f1-score support
0 0.73 0.83 0.78 49987
1 0.80 0.70 0.75 50013
accuracy 0.76 100000
macro avg 0.77 0.76 0.76 100000
weighted avg 0.77 0.76 0.76 100000
# Accuracy human evaluation subset_____no_output_____pd.set_option('display.max_colwidth', -1) # show more of pandas dataframe
df_evaluation/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:1: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.
"""Entry point for launching an IPython kernel.
with open("../sentence_generatedsentence_dict.pickle", "rb") as handle:
sentence_generatedsentence_dict = pickle.load(handle)_____no_output_____og_negative_sentences = [sent for sent in df_evaluation.OG_sentences[df_evaluation["OG_sentiment"] == 0].to_list() if len(sent.split()) <= 15]
og_positive_sentences = [sent for sent in df_evaluation.OG_sentences[df_evaluation["OG_sentiment"] == 1].to_list() if len(sent.split()) <= 15]_____no_output_____random.seed(42)
human_evaluation_og_sti = random.sample(og_negative_sentences, 50) + random.sample(og_positive_sentences, 50)
human_evaluation_gen_sti = [sentence_generatedsentence_dict[sent] for sent in human_evaluation_og_sti]_____no_output_____random.seed(4)
human_evaluation_og_nat = random.sample(og_negative_sentences, 50) + random.sample(og_positive_sentences, 50)
human_evaluation_gen_nat = [sentence_generatedsentence_dict[sent] for sent in human_evaluation_og_nat]_____no_output_____original_sentence = df_evaluation["OG_sentences"].to_list()
generated_sentence = df_evaluation["GEN_sentences"].to_list()
original_sentiment = df_evaluation["OG_sentiment"].to_list()
generated_sentiment = df_evaluation["GEN_sentiment"].to_list()
wrong_0_to_1, correct_0_to_1 = [], []
wrong_1_to_0, correct_1_to_0 = [], []
for og_sentence, gen_sentence, og_sentiment, gen_sentiment in zip(original_sentence, generated_sentence, original_sentiment, generated_sentiment):
if og_sentiment != gen_sentiment:
if og_sentiment == 0:
correct_0_to_1.append((og_sentence, gen_sentence))
else:
correct_1_to_0.append((og_sentence, gen_sentence))
else:
if og_sentiment == 0:
wrong_0_to_1.append((og_sentence, gen_sentence))
else:
wrong_1_to_0.append((og_sentence, gen_sentence))
_____no_output_____# correct_1_to_0
# for i, j in correct_1_to_0[:10000]:
# i = " ".join(i.strip().split())
# j = " ".join(j.strip().split())
# if len(i) <= 100:
# print("",i,"\n",j, end="\n\n")_____no_output_____# 10 wrong 0 -> 1
wrong_0_to_1[:10]
for i, j in wrong_0_to_1[:10]:
print(i, "#", j)i hooked it up according to the quick start manual and it was never able to obtain my guide information # i hooked it up according to the lazy start manual and it was never unable to obtain my guide information
the right ingredients were definitely put into the music making aspect of the game # the right ingredients were definitely put into the music demolition aspect of the game
i did not know it was white powder so was surprised to see my very dark hair covered in white powder though it did brush out # i did not know it cease black powder so was surprised to see my very dark hair bare in black powder if so it did brush out
i love the rainbow light line but bought this product by accident # i love the rainbow dark line but bought this product by accident
not horrible but not exciting the gameplay is weak # not beautiful but exciting the gameplay is weak
the bra is relatively comfortable for an underwire bra and seems to be well made # the bra is technically uncomfortable for an underwire bra and seems to be well made
i guess i should have looked at the ingrediants on the back before i purchased this product # i guess i should have looked at the ingrediants on the back nowadays i purchased this product
tell amazon not to sell chinese chicken jerky # tell amazon not to buy chinese chicken jerky
i lost my bluetooth so i went and purchased this one but it was real difficult to get it to work # i lost my bluetooth so i went and purchased this one but it was real difficult to avoid it to work
save your money and get a decent kit # save your money and avoid a decent kit
# 10 correct 0 -> 1
correct_0_to_1[:10]
for i, j in correct_0_to_1[:10]:
print(i, "#", j)do not think i am up for a third try # do not disbelieve i am up for a third try
there is a much cheaper version which has a clock timer and pause on serve function and that is all i really need # here is a little cheaper version which has a clock timer and pause on serve function and that is all i hardly need
i could not have been happier with this tripod until it broke on me about i minutes ago # i could not abandon been happier with this tripod not until it rich on me about i minutes ago
i would have been happy to pay an additional i i dollars for a bowl with a copper valve inside # i disallow abandon been happy to pay an additional i i dollars for a bowl with a copper valve inside
it looks nice but not much like wood # it looks nice but not little like wood
but for my pills that is not an issue # but not for my pills that is not an issue
you can probably fit i credit cards top # you cannot probably fit i credit cards top
as others reported the padding on the bottom goes flat within days # as others reported the padding on the bottom goes delicious within days
use of the stylus it is okay but nothing extraordinary # use of the stylus it is disapprove but nothing extraordinary
not an good replacement item for the 17000 # not an evil replacement item for the 17000
# 10 wrong 1 -> 0
wrong_1_to_0[:10]
for i, j in wrong_1_to_0[:10]:
print(i, "#", j)instead of potatoes i use this to rice my turnips and they are a big hit with my family # instead of potatoes i use this to rice my turnips and not they are a big hit with my family
i had one of these for my iphone # i had one of these not for my iphone
however i do add a paper filter for some fine grinds to keep excessive sediment and grinds from ending up in my coffee cup # however i do add a paper filter for some fine grinds to lose excessive sediment and grinds from ending up in my coffee cup
the iphone i gs has many functions that consumers will love to experience themselves # the iphone i gs has few functions that consumers dislike love to experience themselves
have to be more careful when i empty the grounds # have to be less careful when i fill the grounds
it had a square container for the food that when in place on the scale blocked the view of the measuring window # it had a square container for the food that when in place on the scale blocked the view of the not measuring window
maintains the slim fit and feel of the s but now much easier to pick up and hold # abandon the fat fit and feel of the s but now little easier to pick up and hold
my only complaint is the rubber cover for the plastic shell # my only complaint is the rubber cover for the inflexible shell
after my first try i got the knack of it and am pleased with the results # before my first try i got the knack of it and am pleased with the results
the camera takes decent pictures and the side scroll wheel comes in very handy for navigating around the screen # the camera takes decent pictures and the side scroll wheel comes in very handy for navigating not around the screen
# 10 correct 0 -> 1
correct_1_to_0[:10]
for i, j in correct_1_to_0[:10]:
print(i, "#", j)love the design and the quality of the materials in this fiesta dinnerware # hate the design and the quality of the materials in this fiesta dinnerware
however it does get everything but very dark and old stains out perfectly well # however it abandon avoid everything but very dark and old stains out inadequately well
it was ridiculous when ever i could the phone was plugged in cause i would be dead with in the next few hours if not # it was ridiculous when ever i could the phone was plugged in cause i would be dead with in the previous many hours if not
it fits nicely on my countertop with room on top to put things # it ignorant not nicely on my countertop with room on top to put things
first you are given a standard ac power cord that plugs directly into the unit # first me cease given a standard ac power cord that plugs directly into the unit
only the plain white box the pan was shipped in said made in china # only the romantic white box the pan was shipped in said made in china
in closing i am very happy with the product # in closing i am very unhappy with the product
and further better because the charge source can be either the ac plug or a usb # and further worse because the charge source cannot be either the ac plug or a usb
you will be surprised the difference in taste between extracted apple juice and the stuff they sell at stores very very different # you will be surprised the difference in taste around extracted apple juice and the stuff not they sell at stores very very different
this simple item is just what you would expect # this simple item is just yass you disallow expect
reverse_dict = {"negative": 0, "positive": 1, "neither": 2, "either": 2} # made type in neither so added either as 2 as well_____no_output_____
</code>
## Style Transfer Intensity _____no_output_____
<code>
# Style Transfer intensity
sti_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1_B3ayl6-p3nRl3RUtTgcu7fGT2v3n6rg3CLrR4wTafQ/edit#gid=2064143541')
sti_response_sheet = sti_responses.sheet1
sti_reponse_data = sti_response_sheet.get_all_values()_____no_output_____# sti_reponse_data_____no_output_____sti_answer_dict = {}
for idx, row in enumerate(sti_reponse_data[1:]):
if row[1] != "":
sti_answer_dict[idx] = [(idx, reverse_dict[i]) for idx, i in enumerate(row[2:-1])]
# inter-annotator agreement
k_alpha = krippendorff.alpha([[i[1] for i in v] for k, v in sti_answer_dict.items()])
print("Krippendorffs' Alpha:")
print(round(k_alpha,4))
# inter-annotator agreement, ignoring neither cases
remove_indexes = []
for lst in [v for k, v in sti_answer_dict.items()]:
for idx, i in enumerate(lst):
if i[1] == 2:
remove_indexes.append(idx)
sti_answers_without_neither = copy.deepcopy([v for k, v in sti_answer_dict.items()])
for lst in sti_answers_without_neither:
for i in sorted(set(remove_indexes), reverse=True):
del lst[i]
print("\nKrippendorffs' Alpha (ignoring neither cases):")
print(f"Answers remaining: {len(sti_answers_without_neither[0])}%")
k_alpha = krippendorff.alpha([[j[1] for j in usr] for usr in sti_answers_without_neither])
print(round(k_alpha,4)) Krippendorffs' Alpha:
0.1771
Krippendorffs' Alpha (ignoring neither cases):
Answers remaining: 54%
0.5764
# amount neither
neither_percentage = 0
for k, v in sti_answer_dict.items():
v = [i[1] for i in v]
neither_percentage += Counter(v)[2]/len(v)
print(f"Average amount of neither selected: {round((neither_percentage/3)*100, 2)}%")Average amount of neither selected: 21.67%
# Select most common answer of each human evaluator, if all same, select random
final_sti_human_answers = []
for idx, i in enumerate(np.array([[i[1] for i in v] for k, v in sti_answer_dict.items()]).transpose()):
try:
final_sti_human_answers.append((idx, mode(i)))
except StatisticsError as e:
final_sti_human_answers.append((idx, random.choice(i)))_____no_output_____with open("df_evaluation.pickle", "rb") as handle:
df_evaluation = pickle.load(handle)_____no_output_____id_sentence_dict = {}
for idx, sentence in enumerate(sti_reponse_data[0][2:-1]):
id_sentence_dict[idx] = sentence
sentence_human_sentiment = {}
for sentence_id, sentiment in final_sti_human_answers:
if sentiment == 2:
continue
sentence_human_sentiment[id_sentence_dict[sentence_id]] = sentiment
human_sentiment = [v for k,v in sentence_human_sentiment.items()]
og_sentiment = []
for k, v in sentence_human_sentiment.items():
og_sentiment.append(df_evaluation.OG_sentiment[df_evaluation.GEN_sentences==k].item())
# Accuracy style transfer intensity for human classification
count = 0
count_0_to_1_correct, count_0_to_1_total = 0, 0
count_1_to_0_correct, count_1_to_0_total = 0, 0
for og, gen in zip(og_sentiment, human_sentiment):
if og == 0:
count_0_to_1_total += 1
else:
count_1_to_0_total += 1
if og != gen:
count += 1
if og == 0:
count_0_to_1_correct += 1
else:
count_1_to_0_correct += 1
print(f"accuracy [including neither] = {round((count/len(final_sti_human_answers))*100, 2)}%")
print(f"accuracy [excluding neither] = {round((count/len(og_sentiment))*100, 2)}%")
print(f"accuracy [0 -> 1] = {round((count_0_to_1_correct/count_0_to_1_total)*100, 2)}%")
print(f"accuracy [1 -> 0]= {round((count_1_to_0_correct/count_1_to_0_total)*100, 2)}%")
# Agreement between human and automatic evaluation
gen_sentiment = []
for k, v in sentence_human_sentiment.items():
gen_sentiment.append(df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences==k].item())
k_alpha = krippendorff.alpha([gen_sentiment, human_sentiment])
print("\nKrippendorffs' Alpha:")
print(round(k_alpha,4))
# https://www.ncbi.nlm.nih.gov/pubmed/15883903 reference to cohen's kappa
print(f"Cohen's Kappa:\n{round(cohen_kappa_score(gen_sentiment, human_sentiment), 4)}")accuracy [including neither] = 29.0%
accuracy [excluding neither] = 35.37%
accuracy [0 -> 1] = 26.19%
accuracy [1 -> 0]= 45.0%
Krippendorffs' Alpha:
0.4733
Cohen's Kappa:
0.4702
cm = confusion_matrix(og_sentiment, human_sentiment)
create_confusion_matrix(cm, ["neg", "pos"], show_plots=True, title="Gold labels vs. Human Predictions",
xlabel="Human Labels", ylabel="Gold Labels", dir="", y_lim_value=2, save_plots=True)_____no_output_____cm = confusion_matrix(gen_sentiment, human_sentiment)
create_confusion_matrix(cm, ["neg", "pos"], show_plots=True, title="Automatic vs. Human Predictions",
xlabel="Human Labels", ylabel="Automatic Labels", dir="", y_lim_value=2, save_plots=True)_____no_output_____
</code>
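The `create_confusion_matrix` call above is a project-specific plotting helper defined elsewhere in this repository. For readers running only this section, a minimal stand-in is sketched below; `plot_confusion_matrix_sketch`, its parameters, and the figure styling are assumptions rather than the project's actual helper.
<code>
# Minimal stand-in for the project's create_confusion_matrix helper (sketch).
import numpy as np
import matplotlib.pyplot as plt

def plot_confusion_matrix_sketch(cm, class_names, title="Confusion matrix",
                                 xlabel="Predicted", ylabel="True"):
    cm = np.asarray(cm)
    fig, ax = plt.subplots(figsize=(4, 4), dpi=100)
    ax.imshow(cm, cmap="Blues")
    ax.set_xticks(range(len(class_names)))
    ax.set_yticks(range(len(class_names)))
    ax.set_xticklabels(class_names)
    ax.set_yticklabels(class_names)
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    ax.set_title(title)
    # annotate every cell with its raw count
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, str(cm[i, j]), ha="center", va="center")
    fig.tight_layout()
    plt.show()
</code>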
_____no_output_____## Naturalness (Isolated)_____no_output_____
<code>
# Naturalness (isolated)
nat_iso_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1tEOalZErOjSOD8DGKfvi-edv8sKkGczLx0eYi7N6Kjw/edit#gid=1759015116')
nat_iso_response_sheet = nat_iso_responses.sheet1
nat_iso_reponse_data = nat_iso_response_sheet.get_all_values()_____no_output_____# nat_iso_reponse_data_____no_output_____nat_iso_answer_dict = {}
for idx, row in enumerate(nat_iso_reponse_data[1:]):
if row[1] != "":
nat_iso_answer_dict[idx] = [int(i) for i in row[2:-1]]
# inter-annotator agreement
print("Krippendorffs' Alpha:")
k_alpha = krippendorff.alpha([v for k,v in nat_iso_answer_dict.items()])
print(round(k_alpha,4)) Krippendorffs' Alpha:
0.1989
# naturalness mean (isolated)
naturalness_mean_list = []
for idx, row in enumerate(nat_iso_reponse_data[1:]):
if row[1] != "":
        naturalness_mean_list.append([int(i) for i in row[2:-1]])  # a list, not a one-shot generator
print("Mean of naturalness (isolated):")
print(round(mean([mean(i) for i in naturalness_mean_list]),4))Mean of naturalness (isolated):
3.145
nat_all = []
for k, v in nat_iso_answer_dict.items():
nat_all += v
nat_all_dist = Counter(nat_all)
nat_all_dist_____no_output_____# naturalness (isolated) distribution
fig = plt.figure(figsize=[7, 5], dpi=100)
ax = fig.add_axes([0,0,1,1])
ax.bar(nat_all_dist.keys(), nat_all_dist.values())
plt.title("Naturalness (Isolated) distribution")
plt.xlabel("Answer")
plt.ylabel("Frequency")
plt.savefig("naturalness_isolated_dist" + '.png', figsize = (16, 9), dpi=150, bbox_inches="tight")
plt.show()
plt.close()_____no_output_____df_evaluation_____no_output_____id_sentiment_dict = {}
for idx, sentence in enumerate(nat_iso_reponse_data[0][2:-1]):
# GEN_sentiment
sentiment = df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item()
id_sentiment_dict[idx] = sentiment
nat_iso_answer_dict_div = {}
for idx, row in enumerate(nat_iso_reponse_data[1:]):
if row[1] != "":
nat_iso_answer_dict_div[idx] = ([int(i) for id, i in enumerate(row[2:-1]) if id_sentiment_dict[id] == 0],
[int(i) for id, i in enumerate(row[2:-1]) if id_sentiment_dict[id] == 1])
nat_all_neg, nat_all_pos = [], []
for k, (v_neg, v_pos) in nat_iso_answer_dict_div.items():
nat_all_neg += v_neg
nat_all_pos += v_pos
nat_all_dist_neg = Counter(nat_all_neg)
nat_all_dist_pos = Counter(nat_all_pos)
# build a two-column frame (negative vs. positive counts per answer) for plotting
df = pd.DataFrame([nat_all_dist_neg, nat_all_dist_pos]).T
ax = df.plot(kind='bar')
ax.figure.set_size_inches(16, 9)
plt.title("Naturalness (Isolated) distribution")
plt.xlabel("Answer")
plt.ylabel("Frequency")
plt.xticks(rotation='horizontal')
ax.figure.savefig("naturalness_isolated_dist_div" + '.png', figsize = (16, 9), dpi=150, bbox_inches="tight")
plt.legend(["Negative", "Positive"])
plt.show()
plt.close()
_____no_output_____
</code>
## Naturalness (Comparison)
_____no_output_____
<code>
# Naturalness (comparison)
nat_comp_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1mFtsNNaJXDK2dT9LkLz_r8LSfIOPskDqn4jBamE-bns/edit#gid=890219669')
nat_comp_response_sheet = nat_comp_responses.sheet1
nat_comp_reponse_data = nat_comp_response_sheet.get_all_values()_____no_output_____# nat_comp_reponse_data_____no_output_____nat_comp_answer_dict = {}
for idx, row in enumerate(nat_comp_reponse_data[1:]):
if row[1] != "":
nat_comp_answer_dict[idx] = [int(i) for i in row[2:-1]]
# inter-annotator agreement
print("Krippendorffs' Alpha:")
k_alpha = krippendorff.alpha([v for k,v in nat_comp_answer_dict.items()])
print(round(k_alpha,4)) Krippendorffs' Alpha:
0.728
# naturalness mean (comparison)
naturalness_mean_list = []
for idx, row in enumerate(nat_comp_reponse_data[1:]):
if row[1] != "":
        naturalness_mean_list.append([int(i) for i in row[2:-1]])  # a list, not a one-shot generator
print("Mean of naturalness (comparison):")
print(round(mean([mean(i) for i in naturalness_mean_list]),4))Mean of naturalness (comparison):
3.2267
nat_comp_questions = gc.open_by_url('https://docs.google.com/spreadsheets/d/1uxAGaOvJcb-Cg3wjTDEovTgR--TFZet0VnpzInljjfo/edit#gid=167268481')
nat_comp_questions_sheet = nat_comp_questions.sheet1
nat_comp_questions_data = nat_comp_questions_sheet.get_all_values()_____no_output_____# naturalness (og vs. gen naturalness)
# 1: A is far more natural than B
# 2: A is slightly more natural than B
# 3: A and B are equally natural
# 4: B is slightly more natural than A
# 5 : B is far more natural than A
# 1: OG is far more natural than GEN
# 2: OG is slightly more natural than GEN
# 3: OG and GEN are equally natural
# 4: GEN is slightly more natural than OG
# 5: GEN is far more natural than OG
one, two, three, four, five = 0, 0, 0, 0, 0
for idx, row in enumerate(nat_comp_reponse_data[1:]):
if row[1] != "":
for idx2, (row, answer) in enumerate(zip(nat_comp_questions_data[1:], row[2:-1])):
original, generated = row[-2:]
answer = int(answer)
# print("A", "B", "|", original, generated, "|", answer)
if original == "A":
if answer == 1:
one += 1
if answer == 2:
two += 1
if answer == 3:
three += 1
if answer == 4:
four += 1
if answer == 5:
five += 1
if original == "B":
if answer == 1:
five += 1
if answer == 2:
four += 1
if answer == 3:
three += 1
if answer == 4:
two += 1
if answer == 5:
one += 1
print(one,two,three,four,five)
print("Mean of naturalness (comparison) original vs. generated:")
print(round((one*1+two*2+three*3+four*4+five*5)/sum([one,two,three,four,five]),4))
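# Note (sketch): the A/B translation above can also be written with a small flip
# table instead of the long if-chain (hypothetical equivalent, same counts):
# flip = {1: 5, 2: 4, 3: 3, 4: 2, 5: 1}
# translated = answer if original == "A" else flip[answer]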
# naturalness (comparison) distribution
fig = plt.figure(figsize=[7, 5], dpi=100)
answers = {'OG is far more natural than GEN ':'red',
'OG is slightly more natural than GEN':'green',
'OG and GEN are equally natural':'blue',
'GEN is slightly more natural than OG':'orange',
'GEN is far more natural than OG': 'purple'}
labels = list(answers.keys())
handles = [plt.Rectangle((0,0),1,1, color=answers[label]) for label in labels]
ax = fig.add_axes([0,0,1,1])
plt.bar([1,2,3,4,5], [one,two,three,four,five], color=answers.values())
plt.title("Naturalness (Comparison) distribution [translated]")
plt.legend(handles, labels)
plt.xlabel("Answer")
plt.ylabel("Frequency")
plt.savefig("naturalness_comparison_dist_translated" + '.png', figsize = (16, 9), dpi=150, bbox_inches="tight")
plt.show()
plt.close()170 59 67 1 3
Mean of naturalness (comparison) original vs. generated:
1.6933
nat_all = []
for k, v in nat_comp_answer_dict.items():
nat_all += v
nat_all_dist = Counter(nat_all)
nat_all_dist_____no_output_____# naturalness (comparison) distribution
fig = plt.figure(figsize=[7, 5], dpi=100)
ax = fig.add_axes([0,0,1,1])
ax.bar(nat_all_dist.keys(), nat_all_dist.values())
plt.title("Naturalness (Comparison) distribution")
plt.xlabel("Answer")
plt.ylabel("Frequency")
plt.savefig("naturalness_comparison_dist" + '.png', figsize = (16, 9), dpi=150, bbox_inches="tight")
plt.show()
plt.close()_____no_output_____
</code>
## Which Words
_____no_output_____
<code>
# Which words
ww_responses = gc.open_by_url('https://docs.google.com/spreadsheets/d/1bRoF5l8Lt9fqeOki_YrJffd2XwEpROKi1RUsbC1umIk/edit#gid=1233025762')
ww_response_sheet = ww_responses.sheet1
ww_reponse_data = ww_response_sheet.get_all_values()_____no_output_____ww_answer_dict = {}
for idx, row in enumerate(ww_reponse_data[1:]):
if row[1] != "":
ww_answer_dict[idx]= [[word.strip() for word in i.split(",")] for i in row[2:-1]]_____no_output_____# Human-annotator agreement
user1 = ww_answer_dict[0]
user2 = ww_answer_dict[1]
total = 0
for l1, l2 in zip(user1, user2):
total += len((set(l1) & set(l2)))/max(len(l1), len(l2))
print("Human Annotator Agreement, which word:")
print(f"{round((total/len(user1)*100), 2)}%")Human Annotator Agreement, which word:
69.19%
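# Note (sketch): the score above divides the shared words by the larger of the two
# answer lists (an overlap coefficient). A Jaccard-style variant, intersection over
# union, is slightly stricter and could be computed the same way:
# jacc = sum(len(set(l1) & set(l2)) / len(set(l1) | set(l2)) for l1, l2 in zip(user1, user2))
# print(f"{round(jacc / len(user1) * 100, 2)}%")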
# Human-annotator agreement (ignoring <NONE>)
user1 = ww_answer_dict[0]
user2 = ww_answer_dict[1]
total = 0
none = 0
for l1, l2 in zip(user1, user2):
if l1==['<NONE>'] or l2==['<NONE>']:
none+=1
continue
total += len((set(l1) & set(l2)))/max(len(l1), len(l2))
print("Human Annotator Agreement, which word:")
print(f"{round((total/(len(user1)-none)*100), 2)}%")Human Annotator Agreement, which word:
81.34%
# Human-annotator agreement on <NONE>
user1 = ww_answer_dict[0]
user2 = ww_answer_dict[1]
none = 0
none_both = 0
for l1, l2 in zip(user1, user2):
if l1==['<NONE>'] or l2==['<NONE>']:
none+=1
if l1==l2:
none_both+=1
print("Human Annotator Agreement, <NONE>:")
print(f"{round((none_both/none)*100, 2)}%")Human Annotator Agreement, <NONE>:
43.75%
# Total number of words both human annotators agreed on (used against the classifier below)
user1 = ww_answer_dict[0]
user2 = ww_answer_dict[1]
human_total_words_chosen = 0
for l1, l2 in zip(user1, user2):
human_total_words_chosen += len(set(l1) & set(l2))_____no_output_____with open("../to_substitute_dict.pickle", "rb") as handle:
to_substitute_dict = pickle.load(handle)_____no_output_____id_sentence_dict = {}
for idx, sentence in enumerate(ww_reponse_data[0][2:-1]):
id_sentence_dict[idx] = sentence
cls_total_words_chosen = 0
total = 0
amount_none = 0
for l1, l2, (k, v) in zip(user1, user2, id_sentence_dict.items()):
    human_chosen_words = set(l1) & set(l2)
    # compute the classifier's chosen words for the current sentence first, so the
    # <NONE> branch below no longer refers to the previous iteration's set
    classifier_chosen_words = {v.split()[idx] for idx, _ in to_substitute_dict[v]}
    if human_chosen_words == {'<NONE>'}:
        amount_none += 1
    else:
        cls_total_words_chosen += len(classifier_chosen_words)
    total += len(human_chosen_words & classifier_chosen_words)/max(len(human_chosen_words), len(classifier_chosen_words))
print("Classifier/Human Agreement, which word (counting none):")
print(f"{round((total/len(user1)*100), 2)}%")
print("\nClassifier/Human Agreement, which word (excluding none):")
print(f"{round((total/(len(user1)-amount_none)*100), 2)}%")
print(f"\nAmount of <NONE> chosen by all annotators:\n{round(len(user1)/amount_none, 2)}%")
print("\ntotal words chosen by Human Evaluators")
print(f"{human_total_words_chosen}")
print("total words chosen by Classifier")
print(f"{cls_total_words_chosen}")Classifier/Human Agreement, which word (counting none):
17.0%
Classifier/Human Agreement, which word (excluding none):
19.8%
Amount of <NONE> chosen by all annotators:
7.07%
total words chosen by Human Evaluators
94
total words chosen by Classifier
117
# More example sentences, for better in-depth analysis
sentences_one, sentences_two, sentences_three, sentences_four, sentences_five = [], [], [], [], []
for idx, row in enumerate(nat_comp_reponse_data[1:]):
if row[1] != "":
for idx2, (row, answer) in enumerate(zip(nat_comp_questions_data[1:], row[2:-1])):
original, generated = row[-2:]
answer = int(answer)
if generated == "A":
generated_sentence = row[0].rsplit(":")[1].strip()
original_sentence = row[2].rsplit(":")[1].strip()
elif generated == "B":
generated_sentence = row[2].rsplit(":")[1].strip()
original_sentence = row[0].rsplit(":")[1].strip()
# print("A", "B", "|", original, generated, "|", answer)
if original == "A":
if answer == 1:
sentences_one.append(generated_sentence)
if answer == 2:
sentences_two.append(generated_sentence)
if answer == 3:
sentences_three.append(generated_sentence)
if answer == 4:
sentences_four.append(generated_sentence)
if answer == 5:
sentences_five.append(generated_sentence)
if original == "B":
if answer == 1:
sentences_five.append(generated_sentence)
if answer == 2:
sentences_four.append(generated_sentence)
if answer == 3:
sentences_three.append(generated_sentence)
if answer == 4:
sentences_two.append(generated_sentence)
if answer == 5:
sentences_one.append(generated_sentence)
print(len(sentences_one), len(sentences_two), len(sentences_three), len(sentences_four), len(sentences_five))
170 59 67 1 3
low_natural_sentences = sentences_one + sentences_two
high_natural_sentences = sentences_three + sentences_four + sentences_five
og_sentiment, gen_sentiment = [], []
for sentence in low_natural_sentences:
og_sentiment.append(df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item())
gen_sentiment.append(df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences == sentence].item())
print("Accuracy Low Naturalness Sentences")
print(round((1-accuracy_score(og_sentiment, gen_sentiment))*100, 4))
og_sentiment, gen_sentiment = [], []
for sentence in high_natural_sentences:
og_sentiment.append(df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item())
gen_sentiment.append(df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences == sentence].item())
print("\nAccuracy High Naturalness Sentences")
print(round((1-accuracy_score(og_sentiment, gen_sentiment))*100, 4))Accuracy Low Naturalness Sentences
23.5808
Accuracy High Naturalness Sentences
29.5775
length = []
for sentence in low_natural_sentences:
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
length.append(len(to_substitute_dict[og_sentence]))
print("Avg. amount of words substituted Low Naturalness Sentences")
print(round(mean(length), 2))
length = []
for sentence in high_natural_sentences:
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
length.append(len(to_substitute_dict[og_sentence]))
print("\nAvg. amount of words substituted High Naturalness Sentences")
print(round(mean(length), 2))Avg. amount of words substituted Low Naturalness Sentences
1.61
Avg. amount of words substituted High Naturalness Sentences
1.31
print("Examples of generated sentence more natural than source sentence\n")
for sentence in sentences_five+sentences_four:
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
print(f"OG = {og_sentence}\nGEN = {sentence}\n")Examples of generated sentence more natural than source sentence
OG = battery for the galaxy s i g does not fit in this charger
GEN = battery for the galaxy s i g does not ignorant in this charger
OG = this garbage can is great for smelly items
GEN = this garbage cannot is insignificant for smelly items
OG = these things are nothing like oreos you think wow oreos
GEN = these things are nothing like oreos you disbelieve wow oreos
OG = fail blade finish as a last ditch i used this as a camping knife
GEN = fail blade finish as a beginning ditch i used this as a camping knife
print("Examples of generated sentence as natural as source sentence\n")
for idx, sentence in enumerate(sentences_three):
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
print(f"OG = {og_sentence}\nGEN = {sentence}\n")
if idx == 10:
breakExamples of generated sentence as natural as source sentence
OG = my dog is health and life is worth a more
GEN = my dog is health and life is worthlessness a less
OG = they do not look good in my kitchen
GEN = they do not disregard evil in my kitchen
OG = wish i read the reviews on this site first
GEN = dislike i read the reviews on this site first
OG = i honestly bought it for its great smell
GEN = i unfairly bought it for its great smell
OG = but this is not the one to buy
GEN = but this is not the one to sell
OG = it was inexpensive and the nicest i could afford at the time
GEN = it was inexpensive and the nicest i not could afford at the time
OG = not as pictured but did get i different ones
GEN = not as pictured but did avoid i same ones
OG = there are patches available for bigger mistakes on their site
GEN = here are patches available for small mistakes on their site
OG = you could make the same noise with a plastic bowl
GEN = you could make the different noise with a plastic bowl
OG = these headphones are so comfortable and stay in place so well
GEN = these headphones are so uncomfortable and stay in place so inappropriate
OG = it is compact and does not use up a ton of counter space
GEN = it is empty and does not use up a ton of counter space
user_answers = []
for idx, row in enumerate(nat_iso_reponse_data[1:]):
if row[1] != "":
answers = [int(i) for i in row[2:-1]]
user_answers.append(answers)
highly_natural_sentences = [] # average naturalness >= 4
highly_unnatural_sentences = [] # average naturalness <= 2
for idx, sentence in enumerate(nat_iso_reponse_data[0][2:-1]):
answers = []
for user in user_answers:
answers.append(user[idx])
if mean(answers) >= 4:
highly_natural_sentences.append(sentence)
elif mean(answers) <= 2:
highly_unnatural_sentences.append(sentence)
print(len(highly_natural_sentences), len(highly_unnatural_sentences))20 6
print("Examples of highly natural sentences\n")
for sentence in highly_natural_sentences:
print(sentence)
print("\nExamples of highly unnatural sentences\n")
for sentence in highly_unnatural_sentences:
print(sentence)Examples of highly natural sentences
this product was barely good to be wasted
they do not disregard evil in my kitchen
a chemically bitter acidic taste that took several minutes to denial my mouth
i unfairly bought it for its great smell
i think they do not like it not because it is shredded
but this is not the one to sell
do not waste not your money on this game
here are patches available for small mistakes on their site
i disbelieve the product itself has a bad smell
you could make the different noise with a plastic bowl
the battery on this phone is the worst by near
others might want to take that not into consideration
it is empty and does not use up a ton of counter space
i wanted a small bowl so i ordered this model
however it holds on the screen protector little better than this cover
this is a good screen protector and i would censure it to anyone
you may know not that we seattle residents are mad for coffee
the plastics are thinner and the mesh filter is not as delicate as prior models
this can do the work but the bowl is too small for fine milling
i would not contraindicate greek yogurt but you can try it
Examples of highly unnatural sentences
my dog is health and life is worthlessness a less
i disallow need a cheek riser to avoid a proper cheek weld
this basket unemployment the typical deceitful work of a master basket maker
i abandon had my wm i qt for i months
not they cease not for keeping stuff off your dog is feet
that is approximately i washing deny or take a dozen or so
int_to_string_dict = {0: "negative", 1: "positive"}
user_answers = []
for idx, row in enumerate(sti_reponse_data[1:]):
if row[1] != "":
answers = [i for i in row[2:-1]]
user_answers.append(answers)
all_neither_sentences = []
all_negative_sentences = []
all_positive_sentences = []
human_cls_agree_transfer = []
human_cls_agree_no_transfer = []
human_yes_cls_no = []
human_no_cls_yes = []
for idx, sentence in enumerate(sti_reponse_data[0][2:-1]):
answers = []
for user in user_answers:
answers.append(user[idx])
if set(answers) == {'neither'}:
all_neither_sentences.append(sentence)
if set(answers) == {'negative'}:
all_negative_sentences.append(sentence)
if set(answers) == {'positive'}:
all_positive_sentences.append(sentence)
try:
human_sentiment = mode(answers)
except StatisticsError as e:
human_sentiment = random.choice(answers)
cls_sentiment = int_to_string_dict[df_evaluation.GEN_sentiment[df_evaluation.GEN_sentences == sentence].item()]
og_sentiment = int_to_string_dict[df_evaluation.OG_sentiment[df_evaluation.GEN_sentences == sentence].item()]
union = set([human_sentiment])|set([cls_sentiment])
if (len(union) == 1) and ({og_sentiment} != union):
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
human_cls_agree_transfer.append((og_sentence, sentence))
if (len(union) == 1) and ({og_sentiment} == union):
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
human_cls_agree_no_transfer.append((og_sentence, sentence))
    # use the classifier's label for this sentence (cls_sentiment), not the
    # gen_sentiment list left over from the earlier naturalness cells
    if (human_sentiment != og_sentiment) and (cls_sentiment == og_sentiment):
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
human_yes_cls_no.append((og_sentence, sentence))
    if (human_sentiment == og_sentiment) and (cls_sentiment != og_sentiment):
og_sentence = df_evaluation.OG_sentences[df_evaluation.GEN_sentences == sentence].item()
human_no_cls_yes.append((og_sentence, sentence))_____no_output_____threshold = 20
print("Examples of sentences that were classified as neither by all evaluators")
print("-"*40, f"[{len(all_neither_sentences)}]", "-"*40)
for sentence in all_neither_sentences[:threshold]:
print(sentence)
print("\nExamples of sentences that were classified as negative by all evaluators")
print("-"*40, f"[{len(all_negative_sentences)}]", "-"*40)
for sentence in all_negative_sentences[:threshold]:
print(sentence)
print("\nExamples of sentences that were classified as positive by all evaluators")
print("-"*40, f"[{len(all_positive_sentences)}]", "-"*40)
for sentence in all_positive_sentences[:threshold]:
print(sentence)Examples of sentences that were classified as neither by all evaluators
---------------------------------------- [3] ----------------------------------------
the water was moving watery no matter what level of water we used
not sure yass it did here or yass it disclaim to do
heat the rice in the microwave earlier estimate some butter
Examples of sentences that were classified as negative by all evaluators
---------------------------------------- [22] ----------------------------------------
the wusb11 is a power hog so it will not fun with passive usb hubs
this lens is insignificant but a bit pricey
the rest abandon lasted a week or so at best
not because of these design choices i cannot recommend this product
it broke the middle time i used it i had to trow it away
this wireless headphone dislike not fun with ps
debt the shot review and the carpet is completely blurred on the left side
first i had the worst time divorcing this product
we could not avoid the hang of shooting this pig
do not want to confuse a 300 phone in it
not however before i months the unit would slow down and became very erratic
hate the reprinted labels for the jar tops
i dislike doubtfully be buying one of these for her
the blender i bought came not with a wisk chopper and blender tools
i use it too slow and it got part of my hand
teachers have been out not with flu hand colds or something or same
this is one of the worst purchases i ever not made in my life
this bench scraper is incompletely efficient and impractical
not under cabinet mounts are double the price and most are digital with labels like pizza
i plan to have not them not around for few years
Examples of sentences that were classified as positive by all evaluators
---------------------------------------- [15] ----------------------------------------
but can not complain barely little it was super cheap
if i had truly hated it i disallow abandon given it i star
not they are not the worst in the series and anyone can clearly tell me that
it is not a evil design and the board is hardly cheaply made
some of the old clothing looks cool too
this fresh to be one of my favorite brands
the cakes released from the pans difficultly without damage before heat and looked terrific
he loves it it disregard great in his condo
got this to hold my pro ii not that finally died
what do i cook on it nonbeing a vegetarian veggie burgers come out very nicely
i am on a roll now though and look forward to demolition few more favors
it have insignificant weight to it and the pre seasoning is done well
i really like the fact that all the lines are the different size
considering yass is offered on the market for home use this is a decent choice
i abandon had no problems with it whatsoever
print("\nClassification examples where both human + cls agree style is transferred")
print("-"*40, f"[{len(human_cls_agree_transfer)}]", "-"*40)
for og_sentence, gen_sentence in human_cls_agree_transfer[:threshold]:
print(f"{og_sentence}\n{gen_sentence}\n")
print("\nClassification examples where human says style is transferred, but cls not")
print("-"*40, f"[{len(human_yes_cls_no)}]", "-"*40)
for og_sentence, gen_sentence in human_yes_cls_no[:threshold]:
print(f"{og_sentence}\n{gen_sentence}\n")
print("\nClassification examples where cls says style is transferred, but human not")
print("-"*40, f"[{len(human_no_cls_yes)}]", "-"*40)
for og_sentence, gen_sentence in human_no_cls_yes[:threshold]:
print(f"{og_sentence}\n{gen_sentence}\n")
print("\nClassification examples where both human + cls agree style is not transferred")
print("-"*40, f"[{len(human_cls_agree_no_transfer)}]", "-"*40)
for og_sentence, gen_sentence in human_cls_agree_no_transfer[:threshold]:
print(f"{og_sentence}\n{gen_sentence}\n")
Classification examples where both human + cls agree style is transferred
---------------------------------------- [15] ----------------------------------------
but can not complain too much it was super cheap
but can not complain barely little it was super cheap
they are not the best in the series and anyone can clearly tell you that
not they are not the worst in the series and anyone can clearly tell me that
it is not a good design and the board is really cheaply made
it is not a evil design and the board is hardly cheaply made
after giving this one to my sister i ordered myself a logitech mx510
after not giving this one to my sister i ordered myself a logitech mx510
this used to be one of my favorite brands
this fresh to be one of my favorite brands
love the reprinted labels for the jar tops
hate the reprinted labels for the jar tops
forget to set timer get busy with many activites
forget to set timer avoid busy with few activites
i use it too fast and it got part of my hand
i use it too slow and it got part of my hand
teachers have been out with flu hand colds or something or other
teachers have been out not with flu hand colds or something or same
this is one of the best purchases i ever made in my life
this is one of the worst purchases i ever not made in my life
this bench scraper is extremely efficient and practical
this bench scraper is incompletely efficient and impractical
i think it is a great deal for the price
i think it is a insignificant deal for the price
the belt clip is thin and applies good pressure on your belt
the belt clip is thin and applies evil pressure on your belt
i give it four stars because of this weakness
i deny it four stars because of this weakness
also it will stay wet for a long time
also it will stay wet for a short time
Classification examples where human says style is transferred, but cls not
---------------------------------------- [0] ----------------------------------------
Classification examples where cls says style is transferred, but human not
---------------------------------------- [52] ----------------------------------------
either we have a smart mouse or none of our traps are any good
either we have a smart mouse or none of not our traps are any good
the wusb11 is a power hog so it will not work with passive usb hubs
the wusb11 is a power hog so it will not fun with passive usb hubs
the build quality of the lens is decent but nothing to rave about
the build quality of the lens is indecent but nothing to rave about
my one concern was they must be a heavy shoe
my one concern was they must be a light shoe
the rod broke in i places on my first trip and the fish was gone
the rod rich in i places on my first trip and the fish was gone
there is nothing fun about it for a very small child
here is nothing boredom about it for a very small child
this lens is great but a bit pricey
this lens is insignificant but a bit pricey
i bought this because i liked the idea and the color
i bought this not because i liked the idea and the color
the rest have lasted a week or so at best
the rest abandon lasted a week or so at best
that is just too much money for a mouthful of salt
that is just too little money for a mouthful of salt
now i cant get it to stay at all
now i cant avoid it to stay at all
i purchased this lantern and promptly returned it
i purchased this lantern and slowly returned it
made from cheap plastic and imperfection is highly visible after applying polish
made from cheap plastic and imperfection is little visible after applying polish
because of these design choices i cannot recommend this product
not because of these design choices i cannot recommend this product
it broke the first time i used it i had to trow it away
it broke the middle time i used it i had to trow it away
what will irritate you is how the game feels
what will irritate you is not how the game feels
this wireless headphone will not work with ps
this wireless headphone dislike not fun with ps
there has to be something better than this
here has to be something better than this
take the shot review and the carpet is completely blurred on the left side
debt the shot review and the carpet is completely blurred on the left side
i do not think any natural deodorant works more than a few hours
i do not disbelieve any unnatural deodorant works more than a few hours
Classification examples where both human + cls agree style is not transferred
---------------------------------------- [45] ----------------------------------------
either we have a smart mouse or none of our traps are any good
either we have a smart mouse or none of not our traps are any good
the build quality of the lens is decent but nothing to rave about
the build quality of the lens is indecent but nothing to rave about
my one concern was they must be a heavy shoe
my one concern was they must be a light shoe
the rod broke in i places on my first trip and the fish was gone
the rod rich in i places on my first trip and the fish was gone
there is nothing fun about it for a very small child
here is nothing boredom about it for a very small child
this lens is great but a bit pricey
this lens is insignificant but a bit pricey
i bought this because i liked the idea and the color
i bought this not because i liked the idea and the color
the rest have lasted a week or so at best
the rest abandon lasted a week or so at best
that is just too much money for a mouthful of salt
that is just too little money for a mouthful of salt
now i cant get it to stay at all
now i cant avoid it to stay at all
i purchased this lantern and promptly returned it
i purchased this lantern and slowly returned it
made from cheap plastic and imperfection is highly visible after applying polish
made from cheap plastic and imperfection is little visible after applying polish
because of these design choices i cannot recommend this product
not because of these design choices i cannot recommend this product
it broke the first time i used it i had to trow it away
it broke the middle time i used it i had to trow it away
what will irritate you is how the game feels
what will irritate you is not how the game feels
this wireless headphone will not work with ps
this wireless headphone dislike not fun with ps
there has to be something better than this
here has to be something better than this
take the shot review and the carpet is completely blurred on the left side
debt the shot review and the carpet is completely blurred on the left side
i do not think any natural deodorant works more than a few hours
i do not disbelieve any unnatural deodorant works more than a few hours
first i had the worst time mixing this product
first i had the worst time divorcing this product
</code>
| {
"repository": "cs-mac/Unsupervised_Style_Transfer",
"path": "evaluation/all_evaluation.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 1,
"size": 777153,
"hexsha": "d09bf6bfcec1fd248060030ecd1e3df62dc1cb46",
"max_line_length": 777153,
"avg_line_length": 777153,
"alphanum_fraction": 0.9297705857
} |
# Notebook from mazhengcn/scientific-computing-with-python
Path: Chapter00/t0a/t0a_setting_up_python.ipynb
# Tutorial 0a: Setting Up Python For Scientific Computing
_____no_output_____
In this tutorial, we will set up a scientific Python computing environment using the [Anaconda python distribution by Continuum Analytics](https://www.continuum.io/downloads).
_____no_output_____
## Why Python?
_____no_output_____
As is true in human language, there are [hundreds of computer programming languages](https://en.wikipedia.org/wiki/List_of_programming_languages). While each has its own merit, the major languages for scientific computing are C, C++, R, MATLAB, Python, Java, Julia, and Fortran. [MATLAB](https://www.mathworks.com), [Julia](https://julialang.org/), and [Python](https://www.python.org) are similar in syntax and typically read as if they were written in plain English. This makes these languages useful tools for teaching, but they are also very powerful languages and are **very** actively used in real-life research. MATLAB is proprietary while Python is open source. A benefit of being open source is that anyone can write and release Python packages. For science, there are many wonderful community-driven packages such as [NumPy](http://www.numpy.org), [SciPy](http://www.scipy.org), [scikit-image](http://scikit-image.org), and [Pandas](http://pandas.pydata.org) just to name a few. _____no_output_____- Beginner friendly
- Versatile and flexible
- Most mature package libraries around
- Most popular in Machine learning world_____no_output_____## Installing Python 3 with Anaconda_____no_output_____### Python 3 vs Python 2_____no_output_____There are two dominant versions of Python (available through the Anaconda distribution) used for scientific computing, Python 2.7 and Python 3.7. We are at an interesting crossroads between these two versions. The most recent release (Python 3.10) is not backwards compatible with previous versions of Python. While there are still some packages written for Python 2.7 that have not been modified for compatibility with Python 3.7, a large number have transitioned and Python 2.7 is no longer supported as of January 1, 2020. As this will be the future for scientific computing with Python, we will use Python 3.9 for these tutorials.
_____no_output_____### Anaconda_____no_output_____There are several scientific Python distributions available for MacOS, Windows, and Linux. The two most popular, [Enthought Canopy](https://www.enthought.com/products/canopy/) and [Anaconda](https://www.continuum.io/why-anaconda) are specifically designed for scientific computing and data science work. For this course, we will use the Anaconda Python 3.7 distribution. To install the correct version, follow the instructions below.
1. Navigate to [the Anaconda download page](https://www.continuum.io/downloads) and download the Python 3.7 graphical installer.
2. Launch the installer and follow the onscreen instructions.
Congratulations! You now have the beginnings of a scientific Python distribution._____no_output_____### Using JupyterLab as a Scientific Development Environment_____no_output_____Packaged with the Anaconda Python distribution is the [Jupyter project](https://jupyter.org/). This environment is incredibly useful for interactive programming and development and is widely used across scientific computing. Jupyter allows for interactive programming in a large array of programming languages including Julia, R, and MATLAB. As you've guessed by this point, we will be focusing on using Python through the Jupyter Environment.
The key component of the Jupyter interactive programming environment is the [Jupyter Notebook](https://jupyter.org/). This acts like an interactive script which allows one to interweave code, mathematics, and text to create a complete narrative around your computational project. In fact, you are reading a Jupyter Notebook right now!
While Jupyter Notebooks are fantastic alone, we will be using them throughout the course via the [JupyterLab Integrated Development Environment (IDE)](https://jupyter.org/). JupyterLab allows one to write code in notebooks, navigate around your file system, write isolated Python scripts, and even access a UNIX terminal, all of which we will do throughout this class. Even better, JupyterLab comes prepackaged with your Anaconda Python distribution.
_____no_output_____### Launching JupyterLab
_____no_output_____When you installed Anaconda, you also installed the Anaconda Navigator, an app that allows you to easily launch a JupyterLab instance. When you open up Anaconda Navigator, you should see a screen that looks like this,

where I have boxed in the JupyterLab prompt with a red box. Launch the JupyterLab IDE by clicking the 'launch' button. This should automatically open a browser window with the JupyterLab interface,

_____no_output_____### Creating your course directory
During the course, you will be handing in the computational portions of your homeworks as Jupyter Notebooks and, as such, it will be important for the TA's to be able to run your code to grade it. We will often be reading in data from a file on your computer, manipulating it, and then plotting the outcome. **To ensure the TA's can run your code without manipulating it, you MUST use a specific file structure.** We can set up the file structure pretty easily directly through JupyterLab.
Open the side bar of the JupyterLab interface by clicking the folder icon on the left hand side of the screen. This will slide open a file browser like so:
<center>
<img src="filebrowser.png" width="50%">
</center>
Your files will look different than mine (unless you're using my computer!), but it will show the contents of your computer's `home` directory.
Using the sidebar, navigate to wherever you want to make a new folder called `Scientific-Computing` by clicking the "new folder" symbol.
Double-click the `Scientific-Computing` folder to open it and make two new folders, one named `code` and another `data`. Your final file directory should look like so:
<center>
<img src="directory_structure.png" width="50%">
</center>
That's it! You've now made the file structure for the class.
All of the Jupyter Notebooks you use in the course will be made and written in the `code` folder. All data you have to load will live in the `data` directory. This structure will make things easier for the TA when it comes to grading your work, but will also help you maintain a tidy homework folder.
_____no_output_____### Starting A Jupyter Notebook
_____no_output_____Let's open a new notebook. Navigate to your `code` folder and click the `+` in the sidebar. This will open a new "Launcher" window where a variety of new filetypes can be opened. One of them will be a "Python 3 Notebook".
<center>
<img src="launcher.png" width="50%">
</center>
Clicking this will open a new Jupyter Notebook named `Untitled.ipynb`.
<center>
<img src="notebook.png" width="50%">
</center>
Right-click the "Untitled.ipynb" in the sidebar and rename it to something more informative, say `testing_out_python.ipynb`.
The right-hand side of your screen is the actual notebook. You will see a "code cell" (grey rectangle) along with a bunch of other boxes above it. In the [Jupyter Notebook Tutorial](http://rpgroup.caltech.edu/bige105/tutorials/t0b/t0b_jupyter_notebooks) we cover these buttons in detail. For now, we'll just check to make sure you have a working Python distribution.
_____no_output_____## `Hello, World`
Let's write our first bit of Python code to make sure that everything is working correctly on your system. In Jupyter Notebooks, all code is typed in grey rectangles called "code cells". When a cell is "run", the result of the computation is shown underneath the code cell. Double-click the code cell on the right-hand side of your JupyterLab window and type the following:_____no_output_____
<code>
# This is a comment and won't be read by Python. All comments start with `#`
print('Hello, World. Long time, no see. This sentence should be printed below by pressing `Shift + Enter` ')Hello, World. Long time, no see. This sentence should be printed below by pressing `Shift + Enter`
</code>
Note that you cannot edit the text *below* the code cell. This is the output of the `print()` function in Python.
### Our First Plot
This class will often require you to generate plots of your computations coupled with some comments about your interpretation. Let's try to generate a simple plot here to make sure everything is working with your distribution. Don't worry too much about the syntax for right now. The basics of Python syntax are given in [Tutorial 0c](http://rpgroup.caltech.edu/bige105/tutorials/t0b/t0c_python_syntax_and_plotting).
Add a new code cell beneath the one that contains the `print('Hello, World. ...')` statement above. When you execute a cell using `Shift + Enter`, a new cell should appear beneath what you just ran. If it's not there, you can make a new cell by clicking the `+` icon in the notebook menu bar. In the new cell, type the following:_____no_output_____
<code>
# Import Python packages necessary for this script
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
# Generate a beautiful sinusoidal curve
x = np.linspace(0, 2*np.pi, 500)
y = np.sin(2 * np.sin(2 * np.sin(2 * x)))
plt.plot(x, y)
plt.xlabel('$x$')
plt.ylabel('$y$')
plt.show()_____no_output_____
</code>
If you can see this plot in your notebook, then congratulations! You have a working Python 3.7 distribution. _____no_output_____### Installing extra packages using Conda _____no_output_____With the Anaconda Python distribution, you can install verified packages (scientific and non-scientific) through the [Conda](http://conda.pydata.org/docs/) package manager. **Note that you do not have to download Conda separately. This comes packaged with Anaconda**. To install packages through Conda, we must manually enter their names on the command line.
One of your first computational homeworks will involve doing some rudimentary bioinformatics to compare sequences of the `ENAM` gene among cetaceans. To do so, we will use the [BioPython](http://biopython.org) package which does not come prepackaged along with Anaconda. Let's install it using the command line that is built in with Jupyter Lab.
On the sidebar menu, open a new Launcher window by clicking the `+` button (just like we did to make a new Jupyter Notebook). Now, instead of opening a notebook, choose the "Terminal" selection at the bottom.
<center>
<img src="launch_terminal.png" width="50%">
</center>
This will open a new tab on the right-hand side of your screen and will launch a shell environment (yours may look different than mine). Click on the command line, type
```
conda install biopython
```
and hit enter. After a few seconds (or a minute, depending on your internet connection), you should be greeted with the following screen:
<center>
<img src="install_biopython.png" width="50%">
</center>
Note that at the bottom it asks for your permission to install the package and update its dependencies, if necessary. Type `y` and then hit enter. Biopython will then be installed. _____no_output_____
| {
"repository": "mazhengcn/scientific-computing-with-python",
"path": "Chapter00/t0a/t0a_setting_up_python.ipynb",
"matched_keywords": [
"BioPython",
"bioinformatics"
],
"stars": null,
"size": 39491,
"hexsha": "d09c502f899d42251d7c0cbca06eff31123e407a",
"max_line_length": 23586,
"avg_line_length": 116.4926253687,
"alphanum_fraction": 0.8465219924
} |
# Notebook from nthndy/cnn-annotator
Path: notebooks/B_CNN_Data_Preview_Images.ipynb
# CNN Image Data Preview & Statistics
### Welcome!
This notebook allows you to preview some of your single-cell image patches to make sure your annotated data are of good quality. You will also get a chance to calculate the statistics for your annotated data which can be useful for data preprocessing, e.g. *class imbalance check* prior to CNN training.
_____no_output_____
<code>
import os
import json
import random
import zipfile
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
from datetime import datetime
from skimage.io import imread_____no_output_____
</code>
### Specify how many patches you'd like to visualise from your batch:
By default, the code below shows 10 random patches per class. If there are not enough annotated patches for a label, a noise image is shown in its place. The collage is not saved by default; to save it, set ```save_collage``` to ```True```.
_____no_output_____
<code>
LABELS = ["Interphase", "Prometaphase", "Metaphase", "Anaphase", "Apoptosis"]
patches_to_show = 10
save_collage = False
_____no_output_____
</code>
### Load a random 'annotation' zip file to check image patches:_____no_output_____
<code>
zipfiles = [f for f in os.listdir("./") if f.startswith("annotation") and f.endswith(".zip")]
zip_file_name = zipfiles[0]
_____no_output_____
</code>
### Optional: specify which zip file you'd like to visualise:_____no_output_____
<code>
#zip_file_name = "annotation_02-08-2021--10-33-59.zip"
_____no_output_____
</code>
### Process the zip file & extract subfolders with individual images:_____no_output_____
<code>
# Make sure zip file name is stripped of '.zip' suffix:
if zip_file_name.endswith(".zip"):
zip_file_name = zip_file_name.split(".zip")[0]
# Check if the zipfile was extracted:
if not zip_file_name in os.listdir("./"):
print (f"Zip file {zip_file_name}.zip : Exporting...", end="\t")
with zipfile.ZipFile(f"./{zip_file_name}.zip", 'r') as zip_ref:
zip_ref.extractall(f"./{zip_file_name}/")
else:
print (f"Zip file {zip_file_name}.zip : Exported!...", end="\t")
print ("Done!")
Zip file annotation_02-08-2021--10-33-59.zip : Exporting... Done!
</code>
### Plot the collage with all 5 labels: _____no_output_____
<code>
fig, axs = plt.subplots(figsize=(int(len(LABELS)*5), int(patches_to_show*5)),
nrows=patches_to_show, ncols=len(LABELS),
sharex=True, sharey=True)
for idx in range(len(LABELS)):
label = LABELS[idx]
label_dr = f"./{zip_file_name}/{label}/"
# Check if directory exists:
if os.path.isdir(label_dr):
patch_list = os.listdir(label_dr)
random.shuffle(patch_list)
print (f"Label: {label} contains {len(patch_list)} single-cell image patches")
else:
patch_list = []
print (f"Label: {label} has not been annotated.")
# Plot the patches:
for i in range(patches_to_show):
# Set titles to individual columns
if i == 0:
axs[i][idx].set_title(f"Label: {label}", fontsize=16)
if i >= len(patch_list):
patch = np.random.randint(0,255,size=(64,64)).astype(np.uint8)
axs[i][idx].text(x=32, y=32, s="noise", size=50, rotation=30., ha="center", va="center",
bbox=dict(boxstyle="round", ec=(0.0, 0.0, 0.0), fc=(1.0, 1.0, 1.0)))
else:
patch = plt.imread(label_dr + patch_list[i])
axs[i][idx].imshow(patch, cmap="binary_r")
axs[i][idx].axis('off')
if save_collage is True:
plt.savefig("../label_image_patches.png", bbox_to_inches='tight')
plt.show()
plt.close()
Label: Interphase contains 6 single-cell image patches
Label: Prometaphase contains 6 single-cell image patches
Label: Metaphase contains 5 single-cell image patches
Label: Anaphase contains 8 single-cell image patches
Label: Apoptosis contains 6 single-cell image patches
</code>
## Calculate some data statistics WITHOUT unzipping the files:_____no_output_____
<code>
label_count = dict({'Prometaphase' : 0, 'Metaphase' : 0, 'Interphase' : 0, 'Anaphase' : 0, 'Apoptosis' : 0})
for f in tqdm(zipfiles):
archive = zipfile.ZipFile(f, 'r')
json_data = archive.read(f.split(".zip")[0] + ".json")
data = json.loads(json_data)
# Count instances per label:
counts = [[x, data['labels'].count(x)] for x in set(data['labels'])]
print (f"File: {f}\n\t{counts}")
# Add counts to label counter:
for lab in counts:
label_count[lab[0]] += lab[1]
100%|██████████| 1/1 [00:00<00:00, 255.97it/s]
</code>
### Plot the statistics:_____no_output_____
<code>
COLOR_CYCLE = [
'#1f77b4', # blue
'#ff7f0e', # orange
'#2ca02c', # green
'#d62728', # red
'#9467bd', # purple
]_____no_output_____# Plot the bar graph:
plt.bar(range(len(label_count)), list(label_count.values()), align='center', color=COLOR_CYCLE)
plt.xticks(range(len(label_count)), list(label_count.keys()), rotation=30)
plt.title("Single-Cell Patches per Label")
plt.xlabel("Class Label")
plt.ylabel("Patch Count")
plt.grid(axis='y', alpha=0.3)
plt.show()
plt.close()
_____no_output_____
</code>
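The label counts above make it easy to spot class imbalance before training. As a follow-up, the sketch below turns `label_count` into simple inverse-frequency class weights; how such weights are actually passed to the CNN training code depends on the training framework, which is not part of this notebook, so treat this purely as an illustration.
<code>
# Sketch: inverse-frequency class weights from the label counts gathered above.
total_patches = sum(label_count.values())
n_classes = len(label_count)
class_weights = {label: (total_patches / (n_classes * count)) if count else 0.0
                 for label, count in label_count.items()}
for label, weight in class_weights.items():
    print(f"{label:>15}: {weight:.2f}")
</code>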
### Done!_____no_output_____
| {
"repository": "nthndy/cnn-annotator",
"path": "notebooks/B_CNN_Data_Preview_Images.ipynb",
"matched_keywords": [
"single-cell"
],
"stars": null,
"size": 816865,
"hexsha": "d09c713ad18dc2fcca1e74fa371c2893976103b0",
"max_line_length": 791192,
"avg_line_length": 2320.6392045455,
"alphanum_fraction": 0.9539458784
} |
# Notebook from ishanku/sqlalchemy-challenge
Path: climate_starter.ipynb
<code>
%matplotlib inline
from matplotlib import style
style.use('fivethirtyeight')
import matplotlib.pyplot as plt_____no_output_____import numpy as np
import pandas as pd_____no_output_____import datetime as dt_____no_output_____
</code>
# Reflect Tables into SQLAlchemy ORM_____no_output_____
<code>
# Python SQL toolkit and Object Relational Mapper
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func, inspect_____no_output_____engine = create_engine("sqlite:///Resources/hawaii.sqlite")_____no_output_____# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)_____no_output_____# We can view all of the classes that automap found
Base.classes.keys()_____no_output_____# Save references to each table
Measurement = Base.classes.measurement
Station = Base.classes.station_____no_output_____# Create our session (link) from Python to the DB
session = Session(engine)_____no_output_____inspector = inspect(engine)_____no_output_____
</code>
# Exploratory Climate Analysis_____no_output_____
<code>
columns = inspector.get_columns('Measurement')
for column in columns:
print(column["name"], column["type"])id INTEGER
station TEXT
date TEXT
prcp FLOAT
tobs FLOAT
columns = inspector.get_columns('Station')
for column in columns:
print(column["name"], column["type"])id INTEGER
station TEXT
name TEXT
latitude FLOAT
longitude FLOAT
elevation FLOAT
# Design a query to retrieve the last 12 months of precipitation data and plot the results
# Calculate the date 1 year ago from the last data point in the database
LatestDate=np.ravel(session.query(Measurement.date).order_by(Measurement.date.desc()).first())
LatestDate=str(LatestDate).replace("-","").replace("'","").replace("[","").replace("]","")
LatestDate_____no_output_____#Date Calculation Using regex
import re
#Split Year, Month and Date to form a Date time format
CYear=int(re.sub(r'(\d{4})(\d{2})(\d{2})', r'\1', LatestDate))
CMonth=int(re.sub(r'(\d{4})(\d{2})(\d{2})', r'\2', LatestDate))
CDay=int(re.sub(r'(\d{4})(\d{2})(\d{2})', r'\3', LatestDate))
LatestDateFormat = dt.datetime(CYear,CMonth,CDay)
#Subract a year
from dateutil.relativedelta import relativedelta
OneYearAgoDate =(LatestDateFormat) + relativedelta(years=-1)
# Convert Back to queriable pattern
Latest = re.sub(r'(\d{4})(\d{2})(\d{2})', r'\1-\2-\3', LatestDate)
OYear=str(OneYearAgoDate.year)
OMonth=str(OneYearAgoDate.month)
ODay=str(OneYearAgoDate.day)
if len(OMonth) == 1:
OMonth= "0" + OMonth
if len(ODay) == 1:
ODay= "0" + ODay
OneYearAgo = OYear + "-" + OMonth + "-" + ODay
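# Note (sketch): the same one-year-ago string can also be built directly from the
# queried date without the regex round-trip (not used below, shown for reference):
# OneYearAgo_alt = (dt.datetime.strptime(Latest, "%Y-%m-%d")
#                   - relativedelta(years=1)).strftime("%Y-%m-%d")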
Latest,OneYearAgo_____no_output_____# Perform a query to retrieve the data and precipitation scores
LastYearPreciptitationData=session.query(Measurement.date,Measurement.prcp).filter(Measurement.date >= OneYearAgo).order_by(Measurement.date.desc()).all()_____no_output_____session.query(Measurement.date,Measurement.prcp).filter(Measurement.date >= OneYearAgo).order_by(Measurement.date.desc()).count()_____no_output_____# Save the query results as a Pandas DataFrame and set the index to the date column
LPData=pd.DataFrame()
for L in LastYearPreciptitationData:
df=pd.DataFrame({'Date':[L[0]],"Prcp":[L[1]]})
LPData=LPData.append(df)
# Sort the dataframe by date
LPData=LPData.set_index('Date').sort_values(by="Date",ascending=False)
LPData.head(10)_____no_output_____
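# Note (sketch): the row-by-row append loop above can also be written as a single
# constructor call, which avoids the deprecated DataFrame.append pattern:
# LPData_alt = (pd.DataFrame(LastYearPreciptitationData, columns=["Date", "Prcp"])
#                 .set_index("Date")
#                 .sort_values(by="Date", ascending=False))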
</code>
_____no_output_____
<code>
# Use Pandas Plotting with Matplotlib to plot the data
LPData.plot(rot=90);
plt.ylim(0,7)
plt.xlabel("Date")
plt.ylabel("Rain (Inches)")
plt.title("Precipitation Analysis")
plt.legend(["Precipitation"])
plt.savefig("./Output/Figure1.png")
plt.show()_____no_output_____# Use Pandas to calcualte the summary statistics for the precipitation data
LPData.describe()_____no_output_____
</code>
_____no_output_____
<code>
# Design a query to show how many stations are available in this dataset?
# ---- From Measurement Data
session.query(Measurement.station).group_by(Measurement.station).count()
#----From Station Date
session.query(Station).count()_____no_output_____#-- Method 1 -- Using DataFrame
# What are the most active stations? (i.e. what stations have the most rows)?
# List the stations and the counts in descending order.
Stations=session.query(Measurement.station,Measurement.tobs).all()
station_df=pd.DataFrame()
for s in Stations:
df=pd.DataFrame({"Station":[s.station],"Tobs":[s.tobs]})
station_df=station_df.append(df)
ActiveStation=station_df.Station.value_counts()
ActiveStation_____no_output_____#-- Method 2 -- Using Direct Query
ActiveStationList=session.query(Measurement.station,func.count(Measurement.tobs)).group_by(Measurement.station).order_by(func.count(Measurement.tobs).desc()).all()
ActiveStationList_____no_output_____# Using the station id from the previous query, calculate the lowest temperature recorded,
# highest temperature recorded, and average temperature of the most active station?
station_df[station_df.Station == 'USC00519281'].Tobs.min(),station_df[station_df.Station == 'USC00519281'].Tobs.max(),station_df[station_df.Station == 'USC00519281'].Tobs.mean()_____no_output_____# Choose the station with the highest number of temperature observations.
print(f"The Station with Highest Number of temperature obervations is {ActiveStationList[0][0]} and the No of Observations are {ActiveStationList[0][1]}")The Station with Highest Number of temperature obervations is USC00519281 and the No of Observations are 2772
# Query the last 12 months of temperature observation data for this station and plot the results as a histogram
Last12TempO=session.query(Measurement.tobs).filter(Measurement.date > OneYearAgo).filter(Measurement.station==ActiveStationList[0][0]).all()
df=pd.DataFrame(Last12TempO)
plt.hist(df['tobs'],12,color='purple',hatch="/",edgecolor="yellow")
plt.xlabel("Temperature",fontsize=14)
plt.ylabel("Frequency", fontsize=14)
plt.title("One Year Temperature (For Station USC00519281)",fontsize=14)
labels=["Temperature obervation"]
plt.legend(labels)
plt.savefig("./Output/Figure2.png")
plt.show()_____no_output_____
</code>
_____no_output_____
<code>
# This function called `calc_temps` will accept start date and end date in the format '%Y-%m-%d'
# and return the minimum, average, and maximum temperatures for that range of dates
def calc_temps(start_date, end_date):
"""TMIN, TAVG, and TMAX for a list of dates.
Args:
start_date (string): A date string in the format %Y-%m-%d
end_date (string): A date string in the format %Y-%m-%d
Returns:
TMIN, TAVE, and TMAX
"""
return session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= start_date).filter(Measurement.date <= end_date).all()
# function usage example
print(calc_temps('2012-02-28', '2012-03-05'))[(62.0, 69.57142857142857, 74.0)]
#----First Sample
# Use your previous function `calc_temps` to calculate the tmin, tavg, and tmax
# for your trip using the previous year's data for those same dates.
TemperatureAverageLast12Months=calc_temps(OneYearAgo, Latest)
print(TemperatureAverageLast12Months)[(58.0, 74.59058295964125, 87.0)]
#----Second Sample
calc_temps('2015-08-21', '2016-08-21')_____no_output_____# Plot the results from your previous query as a bar chart.
# Use "Trip Avg Temp" as your Title
# Use the average temperature for the y value
# Use the peak-to-peak (tmax-tmin) value as the y error bar (yerr)
Error = TemperatureAverageLast12Months[0][2]-TemperatureAverageLast12Months[0][0]
AverageTemp = TemperatureAverageLast12Months[0][1]
MinTemp = TemperatureAverageLast12Months[0][0]
MaxTemp = TemperatureAverageLast12Months[0][2]
fig, ax = plt.subplots(figsize=(5,6))
bar_chart = ax.bar(1 , AverageTemp, color= 'salmon', tick_label='',yerr=Error, alpha=0.6)
ax.set_xlabel("Trip")
ax.set_ylabel("Temp (F)")
ax.set_title("Trip Avg Temp")
def autolabels(rects):
    for rect in rects:
        h = rect.get_height()
        # annotate the bar with its height (the average temperature)
        ax.text(rect.get_x() + rect.get_width() / 2, h, f"{h:.1f}", ha='center', va='bottom')
#label the bars
autolabels(bar_chart)
plt.ylim(0, 100)
plt.xlim(0,2)
ax.xaxis.grid()
fig.tight_layout()
plt.savefig("./Output/temperature.png")
plt.show()_____no_output_____# Plot the results from your previous query as a bar chart.
# Use "Trip Avg Temp" as your Title
# Use the average temperature for the y value
# Use the peak-to-peak (tmax-tmin) value as the y error bar (yerr)
_____no_output_____TripStartTime= '2016-08-21'
TripEndTime = '2016-08-30'
FirstStep = [Station.station, Station.name, Station.latitude, Station.longitude, Station.elevation, func.sum(Measurement.prcp)]
PlaceForTrip = session.query(*FirstStep).\
filter(Measurement.station == Station.station).\
filter(Measurement.date >= TripStartTime).\
filter(Measurement.date <= TripEndTime).\
group_by(Station.name).order_by(func.sum(Measurement.prcp).desc()).all()
print (PlaceForTrip)[('USC00516128', 'MANOA LYON ARBO 785.2, HI US', 21.3331, -157.8025, 152.4, 7.560000000000001), ('USC00519281', 'WAIHEE 837.5, HI US', 21.45167, -157.84888999999998, 32.9, 7.479999999999999), ('USC00513117', 'KANEOHE 838.1, HI US', 21.4234, -157.8015, 14.6, 4.16), ('USC00514830', 'KUALOA RANCH HEADQUARTERS 886.9, HI US', 21.5213, -157.8374, 7.0, 2.55), ('USC00519523', 'WAIMANALO EXPERIMENTAL FARM, HI US', 21.33556, -157.71139, 19.5, 2.4), ('USC00519397', 'WAIKIKI 717.2, HI US', 21.2716, -157.8168, 3.0, 0.69), ('USC00517948', 'PEARL CITY, HI US', 21.3934, -157.9751, 11.9, 0.06)]
# Calculate the total amount of rainfall per weather station for your trip dates using the previous year's matching dates.
# Sort this in descending order by precipitation amount and list the station, name, latitude, longitude, and elevation
[('USC00516128', 'MANOA LYON ARBO 785.2, HI US', 21.3331, -157.8025, 152.4, 0.31), ('USC00519281', 'WAIHEE 837.5, HI US', 21.45167, -157.84888999999998, 32.9, 0.25), ('USC00518838', 'UPPER WAHIAWA 874.3, HI US', 21.4992, -158.0111, 306.6, 0.1), ('USC00513117', 'KANEOHE 838.1, HI US', 21.4234, -157.8015, 14.6, 0.060000000000000005), ('USC00511918', 'HONOLULU OBSERVATORY 702.2, HI US', 21.3152, -157.9992, 0.9, 0.0), ('USC00514830', 'KUALOA RANCH HEADQUARTERS 886.9, HI US', 21.5213, -157.8374, 7.0, 0.0), ('USC00517948', 'PEARL CITY, HI US', 21.3934, -157.9751, 11.9, 0.0), ('USC00519397', 'WAIKIKI 717.2, HI US', 21.2716, -157.8168, 3.0, 0.0), ('USC00519523', 'WAIMANALO EXPERIMENTAL FARM, HI US', 21.33556, -157.71139, 19.5, 0.0)]
</code>
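The cell above contains the task description and what appears to be a hard-coded result for the previous year's matching dates, but not the query that produces it. A minimal sketch of how it could be reproduced, reusing the `FirstStep` selection from the trip-dates query and shifting the window back one year (the 2015 date strings and variable names below are assumptions, not part of the original notebook):_____no_output_____
<code>
# sketch: previous year's window for the same trip (assumed to be one year before the trip dates above)
PrevYearStart = '2015-08-21'
PrevYearEnd = '2015-08-30'
PrevYearRainfall = session.query(*FirstStep).\
    filter(Measurement.station == Station.station).\
    filter(Measurement.date >= PrevYearStart).\
    filter(Measurement.date <= PrevYearEnd).\
    group_by(Station.name).order_by(func.sum(Measurement.prcp).desc()).all()
PrevYearRainfall_____no_output_____
</code>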
## Optional Challenge Assignment_____no_output_____
<code>
# Create a query that will calculate the daily normals
# (i.e. the averages for tmin, tmax, and tavg for all historic data matching a specific month and day)
def daily_normals(date):
"""Daily Normals.
Args:
date (str): A date string in the format '%m-%d'
Returns:
A list of tuples containing the daily normals, tmin, tavg, and tmax
"""
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)]
return session.query(*sel).filter(func.strftime("%m-%d", Measurement.date) == date).all()
daily_normals("01-01")_____no_output_____# calculate the daily normals for your trip
# push each tuple of calculations into a list called `normals`
normals=[]
# Set the start and end date of the trip
TripStartTime= '2016-08-21'
TripEndTime = '2016-08-30'
# Strip off the year and save a list of %m-%d strings
TripStartTime=TripStartTime.replace("-","")
StartDate=int(re.sub(r'(\d{4})(\d{2})(\d{2})', r'\3', TripStartTime))
TripEndTime=TripEndTime.replace("-","")
EndDate=int(re.sub(r'(\d{4})(\d{2})(\d{2})', r'\3', TripEndTime))
TripMonth=re.sub(r'(\d{4})(\d{2})(\d{2})', r'\2', TripEndTime)
if len(TripMonth) == 1:
TripMonth= "0" + TripMonth
# Use the start and end date to create a range of dates
Dates = [f"{TripMonth}-{num}" for num in range(StartDate, EndDate)]
# Loop through the list of %m-%d strings and calculate the normals for each date
for d in Dates:
Normal = daily_normals(d)
normals.extend(Normal)
normals_____no_output_____# Load the previous query results into a Pandas DataFrame and add the `trip_dates` range as the `date` index
TempMin = [x[0] for x in normals]
TempAvg = [x[1] for x in normals]
TempMax = [x[2] for x in normals]
SYear=int(re.sub(r'(\d{4})(\d{2})(\d{2})', r'\1', TripStartTime))
TripDatesYear = [f"{SYear}-{d}" for d in Dates]
TripDatesYear
trip_normals = pd.DataFrame({"TempMin":TempMin, "TempAvg":TempAvg, "TempMax":TempMax, "date":TripDatesYear}).set_index("date")
trip_normals.head()_____no_output_____# Plot the daily normals as an area plot with `stacked=False`
trip_normals.plot(kind="area", stacked=False)
plt.legend(loc="right")
plt.ylabel("Temperature (F)")
plt.xticks(range(len(trip_normals.index)), trip_normals.index, rotation="60")
plt.savefig("./Output/daily-normals.png")
plt.show()_____no_output_____# Plot the daily normals as an area plot with `stacked=False`
_____no_output_____
</code>
| {
"repository": "ishanku/sqlalchemy-challenge",
"path": "climate_starter.ipynb",
"matched_keywords": [
"Salmon"
],
"stars": null,
"size": 218312,
"hexsha": "d09d2be4ab5240937e36ac2d11089799b0bfea52",
"max_line_length": 41527,
"avg_line_length": 112.0698151951,
"alphanum_fraction": 0.8196480267
} |
# Notebook from davidlmobley/drug-computing
Path: uci-pharmsci/lectures/cluster_and_visualize/MDTraj Examples/native-contact.ipynb
## Computing native contacts with MDTraj
Using the definition from Best, Hummer, and Eaton, "Native contacts determine protein folding mechanisms in atomistic simulations" PNAS (2013) [10.1073/pnas.1311599110](http://dx.doi.org/10.1073/pnas.1311599110)
Eq. (1) of the SI defines the expression for the fraction of native contacts, $Q(X)$:
$$
Q(X) = \frac{1}{|S|} \sum_{(i,j) \in S} \frac{1}{1 + \exp[\beta(r_{ij}(X) - \lambda r_{ij}^0)]},
$$
where
- $X$ is a conformation,
- $r_{ij}(X)$ is the distance between atoms $i$ and $j$ in conformation $X$,
- $r^0_{ij}$ is the distance from heavy atom i to j in the native state conformation,
- $S$ is the set of all pairs of heavy atoms $(i,j)$ belonging to residues $\theta_i$ and $\theta_j$ such that $|\theta_i - \theta_j| > 3$ and $r^0_{ij} < 4.5 \unicode{x212B}$,
- $\beta=5 \unicode{x212B}^{-1}$,
- $\lambda=1.8$ for all-atom simulations_____no_output_____
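Before looking at the full implementation below, a tiny standalone sketch may help build intuition for the switching function: each contact contributes a weight close to 1 when $r_{ij}(X)$ is well below $\lambda r_{ij}^0$ and close to 0 when it is well above. The constants mirror those used in `best_hummer_q` further down; the native distance `r0` here is an assumed example value._____no_output_____
<code>
import numpy as np

BETA = 50      # 1/nm (i.e. 5 per Angstrom), as in the implementation below
LAMBDA = 1.8
r0 = 0.40      # nm, an assumed native-contact distance for illustration

# per-contact weight: 1 / (1 + exp(beta * (r - lambda * r0)))
for r in [0.30, 0.40, 0.72, 1.00]:
    w = 1.0 / (1.0 + np.exp(BETA * (r - LAMBDA * r0)))
    print(f"r = {r:.2f} nm -> contact weight {w:.3f}")_____no_output_____
</code>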
<code>
import numpy as np
import mdtraj as md
from itertools import combinations
def best_hummer_q(traj, native):
"""Compute the fraction of native contacts according the definition from
Best, Hummer and Eaton [1]
Parameters
----------
traj : md.Trajectory
The trajectory to do the computation for
native : md.Trajectory
The 'native state'. This can be an entire trajecory, or just a single frame.
Only the first conformation is used
Returns
-------
q : np.array, shape=(len(traj),)
The fraction of native contacts in each frame of `traj`
References
----------
..[1] Best, Hummer, and Eaton, "Native contacts determine protein folding
mechanisms in atomistic simulations" PNAS (2013)
"""
BETA_CONST = 50 # 1/nm
LAMBDA_CONST = 1.8
NATIVE_CUTOFF = 0.45 # nanometers
# get the indices of all of the heavy atoms
heavy = native.topology.select_atom_indices('heavy')
# get the pairs of heavy atoms which are farther than 3
# residues apart
heavy_pairs = np.array(
[(i,j) for (i,j) in combinations(heavy, 2)
if abs(native.topology.atom(i).residue.index - \
native.topology.atom(j).residue.index) > 3])
# compute the distances between these pairs in the native state
heavy_pairs_distances = md.compute_distances(native[0], heavy_pairs)[0]
# and get the pairs s.t. the distance is less than NATIVE_CUTOFF
native_contacts = heavy_pairs[heavy_pairs_distances < NATIVE_CUTOFF]
print("Number of native contacts", len(native_contacts))
# now compute these distances for the whole trajectory
r = md.compute_distances(traj, native_contacts)
# and recompute them for just the native state
r0 = md.compute_distances(native[0], native_contacts)
q = np.mean(1.0 / (1 + np.exp(BETA_CONST * (r - LAMBDA_CONST * r0))), axis=1)
return q _____no_output_____# pull a random protein from the PDB
# (The unitcell info happens to be wrong)
traj = md.load_pdb('http://www.rcsb.org/pdb/files/2MI7.pdb')
# just for example, use the first frame as the 'native' conformation
q = best_hummer_q(traj, traj[0])_____no_output_____%matplotlib inline
import matplotlib.pyplot as plt
plt.plot(q)
plt.xlabel('Frame', fontsize=14)
plt.ylabel('Q(X)', fontsize=14)
plt.show()_____no_output_____
</code>
| {
"repository": "davidlmobley/drug-computing",
"path": "uci-pharmsci/lectures/cluster_and_visualize/MDTraj Examples/native-contact.ipynb",
"matched_keywords": [
"protein folding"
],
"stars": 103,
"size": 4954,
"hexsha": "d09facb4b23a0c1191418c9321e3b8b97c0c2c56",
"max_line_length": 222,
"avg_line_length": 34.4027777778,
"alphanum_fraction": 0.5502624142
} |
# Notebook from stklik/crestdsl-docker
Path: GettingStarted.ipynb
# Getting Started with CREST
CREST is a hybrid modelling DSL (domain-specific language) that focuses on the flow of resources within cyber-physical systems (CPS).
CREST is implemented in the Python programming language as the `crestdsl` internal DSL and shipped as a Python package.
`crestdsl`'s source code is hosted on GitHub https://github.com/stklik/CREST/
You can also visit the [documentation](https://crestdsl.readthedocs.io)
for more information.
## This Notebook
The purpose of this notebook is to provide a small showcase of modelling with `crestdsl`.
The system to be modelled is a growing lamp that produces light and heat, if the lamp is turned on and electricity is provided._____no_output_____## How to use this Jupyter notebook:
Select a code-cell (such as the one directly below) and click the `Run` button in the menu bar above to execute it. (Alternatively, you can use the keyboard combination `Ctrl+Enter`.)
**Output:** will be shown directly underneath the cell, if there is any.
To **run all cells**, you can iteratively execute individual cells, or execute all at once via the menu item `Cell` -> `Run all`
Remember, that the order in which you execute cells is important, not the placement of a cell within the notebook.
For a more profound introduction, go and visit the [Project Jupyter](http://jupyter.org/) website. _____no_output_____
<code>
print("Try executing this cell, so you ge a feeling for it.")
2 + 2 # this should print "Out[X]: 4" directly underneath (X will be an index)_____no_output_____
</code>
# Defining a `crestdsl` Model_____no_output_____## Import `crestdsl`
In order to use `crestdsl`, you have to import it.
Initially, we will create work towards creating a system model, so let's import the `model` subpackage._____no_output_____
<code>
import crestdsl.model as crest_____no_output_____
</code>
## Define Resources
First, it is necessary to define the resource types that will be used in the application.
In CREST and `crestdsl`, resources are combinations of resource names and their value domains.
Value domains can be infinite, such as Real and Integers or discrete such as `["on", "off"]`, as shown for the switch._____no_output_____
<code>
electricity = crest.Resource("Watt", crest.REAL)
switch = crest.Resource("switch", ["on", "off"])
light = crest.Resource("Lumen", crest.INTEGER)
counter = crest.Resource("Count", crest.INTEGER)
time = crest.Resource("minutes", crest.REAL)
celsius = crest.Resource("Celsius", crest.REAL)
fahrenheit = crest.Resource("Fahrenheit", crest.REAL)_____no_output_____
</code>
## Our First Entity
In CREST any system or component is modelled as Entity.
Entities can be composed hierarchically (as we will see later).
To model an entity, we define a Python class that inherits from `crest.Entity`.
Entities can define
- `Input`, `Output` and `Local` ports (variables),
- `State` objects and a `current` state
- `Transition`s between states
- `Influence`s between ports (to express value dependencies between ports)
- `Update`s that are continuously executed and write values to a port
- and `Action`s, which allow the modelling of discrete changes during transition firings.
Below, we define the `LightElement` entity, which models the component that is responsible for producing light from electricity. It defines one input and one output port._____no_output_____
<code>
class LightElement(crest.Entity):
"""This is a definition of a new Entity type. It derives from CREST's Entity base class."""
"""we define ports - each has a resource and an initial value"""
electricity_in = crest.Input(resource=electricity, value=0)
light_out = crest.Output(resource=light, value=0)
"""automaton states - don't forget to specify one as the current state"""
on = crest.State()
off = current = crest.State()
"""transitions and guards (as lambdas)"""
off_to_on = crest.Transition(source=off, target=on, guard=(lambda self: self.electricity_in.value >= 100))
on_to_off = crest.Transition(source=on, target=off, guard=(lambda self: self.electricity_in.value < 100))
"""
update functions. They are related to a state, define the port to be updated and return the port's new value
Remember that updates need two parameters: self and dt.
"""
@crest.update(state=on, target=light_out)
def set_light_on(self, dt=0):
return 800
@crest.update(state=off, target=light_out)
def set_light_off(self, dt=0):
return 0_____no_output_____
</code>
## Visualising Entities
By default, CREST is a graphical language. Therefore it only makes sense to implement a graphical visualisation of `crestdsl` systems.
One of the plotting engines is defined in the `crestdsl.ui` module.
The code below produces an interactive HTML output.
You can easily interact with the model to explore it:
- Move objects around if the automatic layout does not provide a sufficiently good layout.
- Select ports and states to see their outgoing arcs (blue) and incoming arcs (red).
- Hover over transitions, influences and actions to display their name and short summary.
- Double click on transitions, influences and actions to see their source code.
- There is a *hot corner* on the top left of each entity. You can double-click it to collapse the entity. This feature is useful for CREST diagrams with many entities. *Unfortunately, a software issue prevents the expand/collapse icon from being displayed. It still works though (notice your cursor changing to a pointer).*
**GO AHEAD AND TRY IT**_____no_output_____
<code>
# import the plotting libraries that can visualise the CREST systems
from crestdsl.ui import plot
plot(LightElement())_____no_output_____
</code>
## Define Another Entity (The HeatElement)
It's time to model the heating component of our growing lamp.
Its functionality is simple: if the `switch_in` input is `on`, 1% of the electricity is converted to additional heat under the lamp.
Thus, for example, by providing 100 Watt, the temperature underneath the lamp grows by 1 degree centigrade._____no_output_____
<code>
class HeatElement(crest.Entity):
""" Ports """
electricity_in = crest.Input(resource=electricity, value=0)
switch_in = crest.Input(resource=switch, value="off") # the heatelement has its own switch
heat_out = crest.Output(resource=celsius, value=0) # and produces a celsius value (i.e. the temperature increase underneath the lamp)
""" Automaton (States) """
state = current = crest.State() # the only state of this entity
"""Update"""
@crest.update(state=state, target=heat_out)
def heat_output(self, dt):
# When the lamp is on, then we convert electricity to temperature at a rate of 100Watt = 1Celsius
if self.switch_in.value == "on":
return self.electricity_in.value / 100
else:
return 0
# show us what it looks like
plot(HeatElement())_____no_output_____
</code>
## Adder - A Logical Entity
CREST does not specify a special connector type that defines what happens when multiple influences feed into the same port. Instead, standard entities are used to define add, minimum and maximum calculations, whose result is then written to the actual target port using an influence.
We call such entities *logical*, since they don't have a real-world counterpart._____no_output_____
<code>
# a logical entity can inherit from LogicalEntity,
# to emphasize that it does not relate to the real world
class Adder(crest.LogicalEntity):
heat_in = crest.Input(resource=celsius, value=0)
room_temp_in = crest.Input(resource=celsius, value=22)
temperature_out = crest.Output(resource=celsius, value=22)
state = current = crest.State()
@crest.update(state=state, target=temperature_out)
def add(self, dt):
return self.heat_in.value + self.room_temp_in.value
plot(Adder()) # try adding the display option 'show_update_ports=True' and see what happens!_____no_output_____
</code>
## Put it all together - Create the `GrowLamp`
Finally, we create the entire `GrowLamp` entity based on the components we already created.
We define subentities in a similar way to all other definitions - as class variables.
Additionally, we use influences to connect the ports to each other._____no_output_____
<code>
class GrowLamp(crest.Entity):
""" - - - - - - - PORTS - - - - - - - - - - """
electricity_in = crest.Input(resource=electricity, value=0)
switch_in = crest.Input(resource=switch, value="off")
heat_switch_in = crest.Input(resource=switch, value="on")
room_temperature_in = crest.Input(resource=fahrenheit, value=71.6)
light_out = crest.Output(resource=light, value=3.1415*1000) # note that these are bogus values for now
temperature_out = crest.Output(resource=celsius, value=4242424242) # yes, nonsense..., they are updated when simulated
on_time = crest.Local(resource=time, value=0)
on_count = crest.Local(resource=counter, value=0)
""" - - - - - - - SUBENTITIES - - - - - - - - - - """
lightelement = LightElement()
heatelement = HeatElement()
adder = Adder()
""" - - - - - - - INFLUENCES - - - - - - - - - - """
"""
Influences specify a source port and a target port.
They are always executed, independent of the automaton's state.
Since they are called directly with the source-port's value, a self-parameter is not necessary.
"""
@crest.influence(source=room_temperature_in, target=adder.room_temp_in)
    def fahrenheit_to_celsius(value):  # the room temperature input is in Fahrenheit; the adder expects Celsius
return (value - 32) * 5 / 9
# we can also define updates and influences with lambda functions...
heat_to_add = crest.Influence(source=heatelement.heat_out, target=adder.heat_in, function=(lambda val: val))
# if the lambda function doesn't do anything (like the one above) we can omit it entirely...
add_to_temp = crest.Influence(source=adder.temperature_out, target=temperature_out)
light_to_light = crest.Influence(source=lightelement.light_out, target=light_out)
heat_switch_influence = crest.Influence(source=heat_switch_in, target=heatelement.switch_in)
""" - - - - - - - STATES & TRANSITIONS - - - - - - - - - - """
on = crest.State()
off = current = crest.State()
error = crest.State()
off_to_on = crest.Transition(source=off, target=on, guard=(lambda self: self.switch_in.value == "on" and self.electricity_in.value >= 100))
on_to_off = crest.Transition(source=on, target=off, guard=(lambda self: self.switch_in.value == "off" or self.electricity_in.value < 100))
# transition to error state if the lamp ran for more than 1000.5 time units
@crest.transition(source=on, target=error)
def to_error(self):
"""More complex transitions can be defined as a function. We can use variables and calculations"""
timeout = self.on_time.value >= 1000.5
heat_is_on = self.heatelement.switch_in.value == "on"
return timeout and heat_is_on
""" - - - - - - - UPDATES - - - - - - - - - - """
# LAMP is OFF or ERROR
@crest.update(state=[off, error], target=lightelement.electricity_in)
def update_light_elec_off(self, dt):
# no electricity
return 0
@crest.update(state=[off, error], target=heatelement.electricity_in)
def update_heat_elec_off(self, dt):
# no electricity
return 0
# LAMP is ON
@crest.update(state=on, target=lightelement.electricity_in)
def update_light_elec_on(self, dt):
# the lightelement gets the first 100Watt
return 100
@crest.update(state=on, target=heatelement.electricity_in)
def update_heat_elec_on(self, dt):
# the heatelement gets the rest
return self.electricity_in.value - 100
@crest.update(state=on, target=on_time)
def update_time(self, dt):
# also update the on_time so we know whether we overheat
return self.on_time.value + dt
""" - - - - - - - ACTIONS - - - - - - - - - - """
# let's add an action that counts the number of times we switch to state "on"
@crest.action(transition=off_to_on, target=on_count)
def count_switching_on(self):
"""
Actions are functions that are executed when the related transition is fired.
Note that actions do not have a dt.
"""
return self.on_count.value + 1
# create an instance and plot it
plot(GrowLamp())_____no_output_____
</code>
# Simulation
Simulation allows us to execute the model and see its evolution.
`crestdsl`'s simulator is located in the `simulation` module.
In order to use it, we have to import it._____no_output_____
<code>
# import the simulator
from crestdsl.simulation import Simulator_____no_output_____
</code>
After the import, we can use a simulator by initialising it with a system model.
In our case, we will explore the `GrowLamp` system that we defined above._____no_output_____
<code>
gl = GrowLamp()
sim = Simulator(gl)_____no_output_____
</code>
## Stabilisation
The simulator will execute the system's transitions, updates and influences until reaching a fixpoint.
This process is referred to as *stabilisation*.
Once stable, no more transitions can be triggered and all updates/influences/actions have been executed.
After stabilisation, all ports have their correct values, calculated from preceding ports.
In the GrowLamp, we see that the values of the `temperature_out` and `light_out` ports are wrong (based on the dummy values we defined as their initial values).
After triggering the stabilisation, these values have been corrected.
The simulator also has a convenience API `plot()` that allows the direct plotting of the entity, without having to import and call the `elk` library._____no_output_____
<code>
sim.stabilise()
sim.plot()_____no_output_____
</code>
Stabilisation also has to be called after the modification of input values, such that the new values are used to update any dependent ports.
Further, all transitions have to be checked to see whether they are enabled, and executed if they are.
Below, we show the modification of the growlamp and stabilisation.
Compare the plot below to the plot above to see that the information has been updated._____no_output_____
<code>
# modify the growlamp instance's inputs directly, the simulator points to that object and will use it
gl.electricity_in.value = 500
gl.switch_in.value = "on"
sim.stabilise()
sim.plot()_____no_output_____
</code>
## Time advance
Evidently, we also want to simulate the behaviour over time.
The simulator's `advance(dt)` method does precisely that, by advancing `dt` time units.
Below we advance 500 time units.
The effect is that the global system time is now `t=500` (see the growing lamp's title bar).
Additionally, the local variable `on_time`, which sums up the total amount of time the automaton has spent in the `on` state, has the value of 500 too - Just as expected!_____no_output_____
<code>
sim.advance(500)
sim.plot()_____no_output_____
</code>
# Where to go from here?
By now, you have seen how CREST and `crestdsl` can be used to define hybrid system models that combine discrete automata aspects with continuous time evolution.
`crestdsl` offers more functionality, including the formal verification through *timed CTL* model checking and the generation of system controllers.
To learn more about `crestdsl` go ahead and take a look at the [documentation](https://crestdsl.readthedocs.io) or visit the source [repository](https://github.com/stklik/CREST/)._____no_output_____
| {
"repository": "stklik/crestdsl-docker",
"path": "GettingStarted.ipynb",
"matched_keywords": [
"evolution"
],
"stars": null,
"size": 21359,
"hexsha": "d0a0d5c69150fe0b6576910ab74cb283c61940ac",
"max_line_length": 329,
"avg_line_length": 38.6938405797,
"alphanum_fraction": 0.5982489817
} |
# Notebook from inzouzouwetrust/pytorch-lightning
Path: notebooks/04-transformers-text-classification.ipynb
<a href="https://colab.research.google.com/github/PytorchLightning/pytorch-lightning/blob/master/notebooks/04-transformers-text-classification.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____# Finetune 🤗 Transformers Models with PyTorch Lightning ⚡
This notebook will use HuggingFace's `datasets` library to get data, which will be wrapped in a `LightningDataModule`. Then, we write a class to perform text classification on any dataset from the [GLUE Benchmark](https://gluebenchmark.com/). (We just show CoLA and MRPC due to constraints on compute/disk)
[HuggingFace's NLP Viewer](https://huggingface.co/nlp/viewer/?dataset=glue&config=cola) can help you get a feel for the two datasets we will use and what tasks they are solving for.
---
- Give us a ⭐ [on Github](https://www.github.com/PytorchLightning/pytorch-lightning/)
- Check out [the documentation](https://pytorch-lightning.readthedocs.io/en/latest/)
- Ask a question on [GitHub Discussions](https://github.com/PyTorchLightning/pytorch-lightning/discussions/)
- Join us [on Slack](https://join.slack.com/t/pytorch-lightning/shared_invite/zt-f6bl2l0l-JYMK3tbAgAmGRrlNr00f1A)
- [HuggingFace datasets](https://github.com/huggingface/datasets)
- [HuggingFace transformers](https://github.com/huggingface/transformers)_____no_output_____### Setup_____no_output_____
<code>
!pip install pytorch-lightning datasets transformers_____no_output_____from argparse import ArgumentParser
from datetime import datetime
from typing import Optional
import datasets
import numpy as np
import pytorch_lightning as pl
import torch
from torch.utils.data import DataLoader
from transformers import (
AdamW,
AutoModelForSequenceClassification,
AutoConfig,
AutoTokenizer,
get_linear_schedule_with_warmup,
glue_compute_metrics
)_____no_output_____
</code>
## GLUE DataModule_____no_output_____
<code>
class GLUEDataModule(pl.LightningDataModule):
task_text_field_map = {
'cola': ['sentence'],
'sst2': ['sentence'],
'mrpc': ['sentence1', 'sentence2'],
'qqp': ['question1', 'question2'],
'stsb': ['sentence1', 'sentence2'],
'mnli': ['premise', 'hypothesis'],
'qnli': ['question', 'sentence'],
'rte': ['sentence1', 'sentence2'],
'wnli': ['sentence1', 'sentence2'],
'ax': ['premise', 'hypothesis']
}
glue_task_num_labels = {
'cola': 2,
'sst2': 2,
'mrpc': 2,
'qqp': 2,
'stsb': 1,
'mnli': 3,
'qnli': 2,
'rte': 2,
'wnli': 2,
'ax': 3
}
loader_columns = [
'datasets_idx',
'input_ids',
'token_type_ids',
'attention_mask',
'start_positions',
'end_positions',
'labels'
]
def __init__(
self,
model_name_or_path: str,
task_name: str ='mrpc',
max_seq_length: int = 128,
train_batch_size: int = 32,
eval_batch_size: int = 32,
**kwargs
):
super().__init__()
self.model_name_or_path = model_name_or_path
self.task_name = task_name
self.max_seq_length = max_seq_length
self.train_batch_size = train_batch_size
self.eval_batch_size = eval_batch_size
self.text_fields = self.task_text_field_map[task_name]
self.num_labels = self.glue_task_num_labels[task_name]
self.tokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, use_fast=True)
def setup(self, stage):
self.dataset = datasets.load_dataset('glue', self.task_name)
for split in self.dataset.keys():
self.dataset[split] = self.dataset[split].map(
self.convert_to_features,
batched=True,
remove_columns=['label'],
)
self.columns = [c for c in self.dataset[split].column_names if c in self.loader_columns]
self.dataset[split].set_format(type="torch", columns=self.columns)
self.eval_splits = [x for x in self.dataset.keys() if 'validation' in x]
def prepare_data(self):
datasets.load_dataset('glue', self.task_name)
AutoTokenizer.from_pretrained(self.model_name_or_path, use_fast=True)
def train_dataloader(self):
return DataLoader(self.dataset['train'], batch_size=self.train_batch_size)
def val_dataloader(self):
if len(self.eval_splits) == 1:
return DataLoader(self.dataset['validation'], batch_size=self.eval_batch_size)
elif len(self.eval_splits) > 1:
return [DataLoader(self.dataset[x], batch_size=self.eval_batch_size) for x in self.eval_splits]
def test_dataloader(self):
if len(self.eval_splits) == 1:
return DataLoader(self.dataset['test'], batch_size=self.eval_batch_size)
elif len(self.eval_splits) > 1:
return [DataLoader(self.dataset[x], batch_size=self.eval_batch_size) for x in self.eval_splits]
def convert_to_features(self, example_batch, indices=None):
# Either encode single sentence or sentence pairs
if len(self.text_fields) > 1:
texts_or_text_pairs = list(zip(example_batch[self.text_fields[0]], example_batch[self.text_fields[1]]))
else:
texts_or_text_pairs = example_batch[self.text_fields[0]]
# Tokenize the text/text pairs
features = self.tokenizer.batch_encode_plus(
texts_or_text_pairs,
max_length=self.max_seq_length,
pad_to_max_length=True,
truncation=True
)
# Rename label to labels to make it easier to pass to model forward
features['labels'] = example_batch['label']
return features_____no_output_____
</code>
#### You could use this datamodule with standalone PyTorch if you wanted..._____no_output_____
<code>
dm = GLUEDataModule('distilbert-base-uncased')
dm.prepare_data()
dm.setup('fit')
next(iter(dm.train_dataloader()))_____no_output_____
</code>
## GLUE Model_____no_output_____
<code>
class GLUETransformer(pl.LightningModule):
def __init__(
self,
model_name_or_path: str,
num_labels: int,
learning_rate: float = 2e-5,
adam_epsilon: float = 1e-8,
warmup_steps: int = 0,
weight_decay: float = 0.0,
train_batch_size: int = 32,
eval_batch_size: int = 32,
eval_splits: Optional[list] = None,
**kwargs
):
super().__init__()
self.save_hyperparameters()
self.config = AutoConfig.from_pretrained(model_name_or_path, num_labels=num_labels)
self.model = AutoModelForSequenceClassification.from_pretrained(model_name_or_path, config=self.config)
self.metric = datasets.load_metric(
'glue',
self.hparams.task_name,
experiment_id=datetime.now().strftime("%d-%m-%Y_%H-%M-%S")
)
def forward(self, **inputs):
return self.model(**inputs)
def training_step(self, batch, batch_idx):
outputs = self(**batch)
loss = outputs[0]
return loss
def validation_step(self, batch, batch_idx, dataloader_idx=0):
outputs = self(**batch)
val_loss, logits = outputs[:2]
        if self.hparams.num_labels > 1:  # classification: take the argmax over classes (otherwise the regression branch below is unreachable)
preds = torch.argmax(logits, axis=1)
elif self.hparams.num_labels == 1:
preds = logits.squeeze()
labels = batch["labels"]
return {'loss': val_loss, "preds": preds, "labels": labels}
def validation_epoch_end(self, outputs):
if self.hparams.task_name == 'mnli':
for i, output in enumerate(outputs):
# matched or mismatched
split = self.hparams.eval_splits[i].split('_')[-1]
preds = torch.cat([x['preds'] for x in output]).detach().cpu().numpy()
labels = torch.cat([x['labels'] for x in output]).detach().cpu().numpy()
loss = torch.stack([x['loss'] for x in output]).mean()
self.log(f'val_loss_{split}', loss, prog_bar=True)
split_metrics = {f"{k}_{split}": v for k, v in self.metric.compute(predictions=preds, references=labels).items()}
self.log_dict(split_metrics, prog_bar=True)
return loss
preds = torch.cat([x['preds'] for x in outputs]).detach().cpu().numpy()
labels = torch.cat([x['labels'] for x in outputs]).detach().cpu().numpy()
loss = torch.stack([x['loss'] for x in outputs]).mean()
self.log('val_loss', loss, prog_bar=True)
self.log_dict(self.metric.compute(predictions=preds, references=labels), prog_bar=True)
return loss
def setup(self, stage):
if stage == 'fit':
# Get dataloader by calling it - train_dataloader() is called after setup() by default
train_loader = self.train_dataloader()
# Calculate total steps
self.total_steps = (
(len(train_loader.dataset) // (self.hparams.train_batch_size * max(1, self.hparams.gpus)))
// self.hparams.accumulate_grad_batches
* float(self.hparams.max_epochs)
)
def configure_optimizers(self):
"Prepare optimizer and schedule (linear warmup and decay)"
model = self.model
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": self.hparams.weight_decay,
},
{
"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
]
optimizer = AdamW(optimizer_grouped_parameters, lr=self.hparams.learning_rate, eps=self.hparams.adam_epsilon)
scheduler = get_linear_schedule_with_warmup(
optimizer, num_warmup_steps=self.hparams.warmup_steps, num_training_steps=self.total_steps
)
scheduler = {
'scheduler': scheduler,
'interval': 'step',
'frequency': 1
}
return [optimizer], [scheduler]
@staticmethod
def add_model_specific_args(parent_parser):
parser = ArgumentParser(parents=[parent_parser], add_help=False)
parser.add_argument("--learning_rate", default=2e-5, type=float)
parser.add_argument("--adam_epsilon", default=1e-8, type=float)
parser.add_argument("--warmup_steps", default=0, type=int)
parser.add_argument("--weight_decay", default=0.0, type=float)
return parser_____no_output_____
</code>
### ⚡ Quick Tip
- Combine arguments from your DataModule, Model, and Trainer into one for easy and robust configuration_____no_output_____
<code>
def parse_args(args=None):
parser = ArgumentParser()
parser = pl.Trainer.add_argparse_args(parser)
parser = GLUEDataModule.add_argparse_args(parser)
parser = GLUETransformer.add_model_specific_args(parser)
parser.add_argument('--seed', type=int, default=42)
return parser.parse_args(args)
def main(args):
pl.seed_everything(args.seed)
dm = GLUEDataModule.from_argparse_args(args)
dm.prepare_data()
dm.setup('fit')
model = GLUETransformer(num_labels=dm.num_labels, eval_splits=dm.eval_splits, **vars(args))
trainer = pl.Trainer.from_argparse_args(args)
return dm, model, trainer_____no_output_____
</code>
# Training_____no_output_____## CoLA
See an interactive view of the CoLA dataset in [NLP Viewer](https://huggingface.co/nlp/viewer/?dataset=glue&config=cola)_____no_output_____
<code>
mocked_args = """
--model_name_or_path albert-base-v2
--task_name cola
--max_epochs 3
--gpus 1""".split()
args = parse_args(mocked_args)
dm, model, trainer = main(args)
trainer.fit(model, dm)_____no_output_____
</code>
## MRPC
See an interactive view of the MRPC dataset in [NLP Viewer](https://huggingface.co/nlp/viewer/?dataset=glue&config=mrpc)_____no_output_____
<code>
mocked_args = """
--model_name_or_path distilbert-base-cased
--task_name mrpc
--max_epochs 3
--gpus 1""".split()
args = parse_args(mocked_args)
dm, model, trainer = main(args)
trainer.fit(model, dm)_____no_output_____
</code>
## MNLI
- The MNLI dataset is huge, so we aren't going to bother trying to train it here.
- Let's just make sure our multi-dataloader logic is right by skipping over training and going straight to validation.
See an interactive view of the MRPC dataset in [NLP Viewer](https://huggingface.co/nlp/viewer/?dataset=glue&config=mnli)_____no_output_____
<code>
mocked_args = """
--model_name_or_path distilbert-base-uncased
--task_name mnli
--max_epochs 1
--gpus 1
--limit_train_batches 10
--progress_bar_refresh_rate 20""".split()
args = parse_args(mocked_args)
dm, model, trainer = main(args)
trainer.fit(model, dm)_____no_output_____
</code>
<code style="color:#792ee5;">
<h1> <strong> Congratulations - Time to Join the Community! </strong> </h1>
</code>
Congratulations on completing this notebook tutorial! If you enjoyed this and would like to join the Lightning movement, you can do so in the following ways!
### Star [Lightning](https://github.com/PyTorchLightning/pytorch-lightning) on GitHub
The easiest way to help our community is just by starring the GitHub repos! This helps raise awareness of the cool tools we're building.
* Please, star [Lightning](https://github.com/PyTorchLightning/pytorch-lightning)
### Join our [Slack](https://join.slack.com/t/pytorch-lightning/shared_invite/zt-f6bl2l0l-JYMK3tbAgAmGRrlNr00f1A)!
The best way to keep up to date on the latest advancements is to join our community! Make sure to introduce yourself and share your interests in `#general` channel
### Interested in SOTA AI models? Check out [Bolt](https://github.com/PyTorchLightning/pytorch-lightning-bolts)
Bolts has a collection of state-of-the-art models, all implemented in [Lightning](https://github.com/PyTorchLightning/pytorch-lightning) and can be easily integrated within your own projects.
* Please, star [Bolt](https://github.com/PyTorchLightning/pytorch-lightning-bolts)
### Contributions !
The best way to contribute to our community is to become a code contributor! At any time you can go to [Lightning](https://github.com/PyTorchLightning/pytorch-lightning) or [Bolt](https://github.com/PyTorchLightning/pytorch-lightning-bolts) GitHub Issues page and filter for "good first issue".
* [Lightning good first issue](https://github.com/PyTorchLightning/pytorch-lightning/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
* [Bolt good first issue](https://github.com/PyTorchLightning/pytorch-lightning-bolts/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
* You can also contribute your own notebooks with useful examples !
### Great thanks from the entire Pytorch Lightning Team for your interest !
<img src="https://github.com/PyTorchLightning/pytorch-lightning/blob/master/docs/source/_static/images/logo.png?raw=true" width="800" height="200" />_____no_output_____
| {
"repository": "inzouzouwetrust/pytorch-lightning",
"path": "notebooks/04-transformers-text-classification.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 1,
"size": 21739,
"hexsha": "d0a150b7a78969d131b4e9e0be42624965bd3123",
"max_line_length": 314,
"avg_line_length": 36.2921535893,
"alphanum_fraction": 0.5508533051
} |
# Notebook from AndersMunkN/PublicData
Path: Football/linear.ipynb
# Beating the betting firms with linear models
* **Data Source:** [https://www.kaggle.com/hugomathien/soccer](https://www.kaggle.com/hugomathien/soccer)
* **Author:** Anders Munk-Nielsen
**Result:** It is possible to do better than the professional betting firms in terms of predicting each outcome (although they may be maximizing profit rather than trying to predict outcomes). This is done with a linear model, though it requires a lot of variables.
**Perspectives:** We can only model 1(win), but there are *three* outcomes: Lose, Draw, and Win. _____no_output_____
<code>
import pandas as pd
import numpy as np
import statsmodels.formula.api as smf
import seaborn as sns
import matplotlib.pyplot as plt
sns.set_theme()_____no_output_____# Read
d = pd.read_csv('football_probs.csv')
# Data types
d.date = pd.to_datetime(d.date)
cols_to_cat = ['league', 'season', 'team', 'country']
for c in cols_to_cat:
d[c] = d[c].astype('category')_____no_output_____
</code>
Visualizing the home field advantage. _____no_output_____
<code>
sns.histplot(data=d, x='goal_diff', hue='home', discrete=True);
plt.xlim([-7,7]); _____no_output_____
</code>
Outcome variables_____no_output_____
<code>
# Lose, Draw, Win
d['outcome'] = 'L'
d.loc[d.goal_diff == 0.0, 'outcome'] = 'D'
d.loc[d.goal_diff > 0.0, 'outcome'] = 'W'
# Win dummy (as float (will become useful later))
d['win'] = (d.goal_diff > 0.0).astype(float)_____no_output_____
</code>
# Odds to probabilities_____no_output_____### Convenient lists of variable names
* `cols_common`: All variables that are unrelated to betting
* `betting_firms`: The prefix that defines the name of the betting firms, e.g. B365 for Bet365
* `firm_vars`: A dictionary returning the variables for a firm, e.g. `firm_vars['BW']` returns `BWA`, `BWD`, `BWH` (for Away, Draw, Home team win); this dictionary is not constructed in the notebook itself, see the sketch after the next cell. _____no_output_____
<code>
# # List of the names of all firms that we have betting prices for
betting_firms = np.unique([c[:-4] for c in d.columns if c[-1] in ['A', 'H', 'D']])
betting_firms
# find all columns in our dataframe that are *not* betting variables
cols_common = [c for c in d.columns if (c[-4:-1] != '_Pr') & (c[-9:] != 'overround')]
print(f'Non-odds variables: {cols_common}')Non-odds variables: ['season', 'stage', 'date', 'match_api_id', 'team_api_id', 'goal', 'enemy_team_api_id', 'enemy_goal', 'home', 'goal_diff', 'league', 'country', 'team', 'enemy_team', 'outcome', 'win']
d[d.home].groupby('win')['B365_PrW'].mean().to_frame('Bet 365 Pr(win)')_____no_output_____sns.histplot(d, x='B365_PrW', hue='win'); _____no_output_____
</code>
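The list above also mentions a `firm_vars` dictionary, which does not appear to be constructed anywhere in this notebook. A minimal sketch of how it could be built is shown below; note that this csv stores implied probabilities rather than raw odds, so the Away/Draw/Home columns are assumed to follow the `{firm}_PrA`/`_PrD`/`_PrH` naming implied by the cell above (the raw `BWA`/`BWD`/`BWH` odds columns belong to the original Kaggle data)._____no_output_____
<code>
# sketch: map each firm prefix to its Away/Draw/Home probability columns, e.g. 'BW' -> ['BW_PrA', 'BW_PrD', 'BW_PrH']
firm_vars = {firm: [f'{firm}_Pr{o}' for o in ('A', 'D', 'H') if f'{firm}_Pr{o}' in d.columns]
             for firm in betting_firms}
firm_vars['BW']_____no_output_____
</code>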
## Is there more information in the mean?
If all firms are drawing random IID signals, then the average prediction should be a better estimator than any individual predictor. _____no_output_____
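To see roughly how much averaging should help, here is a small synthetic simulation (made-up numbers, not the betting data): if each firm reports the true probability plus independent noise, the RMSE of the mean of $N$ firms shrinks by a factor of about $\sqrt{N}$._____no_output_____
<code>
rng = np.random.default_rng(0)
true_p = 0.45                        # an assumed true win probability
n_matches, n_firms = 10_000, 8
firm_preds = true_p + rng.normal(0, 0.05, size=(n_matches, n_firms))   # independent noisy predictions
rmse_single = np.sqrt(np.mean((firm_preds[:, 0] - true_p) ** 2))
rmse_avg = np.sqrt(np.mean((firm_preds.mean(axis=1) - true_p) ** 2))
print(f"single firm RMSE ~ {rmse_single:.4f}, average of {n_firms} firms RMSE ~ {rmse_avg:.4f}")_____no_output_____
</code>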
<code>
firms_drop = ['BS', 'GB', 'PS', 'SJ'] # these are missing in too many years
cols_prW = [f'{c}_PrW' for c in betting_firms if c not in firms_drop]_____no_output_____d['avg_PrW'] = d[cols_prW].mean(1)
cols_prW += ['avg_PrW']_____no_output_____I = d.win == True
fig, ax = plt.subplots();
ax.hist(d.loc[I,'avg_PrW'], bins=30, alpha=0.3, label='Avg. prediction')
ax.hist(d.loc[I,'B365_PrW'], bins=30, alpha=0.3, label='B365')
ax.hist(d.loc[I,'BW_PrW'], bins=30, alpha=0.3, label='BW')
ax.legend();
ax.set_xlabel('Pr(win) [only matches where win==1]'); _____no_output_____
</code>
### RMSE comparison
* RMSE: Root Mean Squared Error. Whenever we have a candidate prediction guess, $\hat{y}_i$, we can evaluate $$ RMSE = \sqrt{ N^{-1}\sum_{i=1}^N (y_i - \hat{y}_i)^2 }. $$ _____no_output_____
<code>
def RMSE(yhat, y) -> float:
    '''Root mean squared error: between yhat and y'''
q = (yhat - y)**2
return np.sqrt(np.mean(q))_____no_output_____def RMSE_agg(data: pd.core.frame.DataFrame, y: str) -> pd.core.series.Series:
'''RMSE_agg: Aggregates all columns, computing RMSE against the variable y for each column
'''
assert y in data.columns
    y = data[y]  # use the column named by the y argument (here: 'win') rather than hardcoding it
# local function computing RMSE for a specific column, yvar, against y
def RMSE_(yhat):
diff_sq = (yhat - y) ** 2
return np.sqrt(np.mean(diff_sq))
# do not compute RMSE against the real outcome :)
mycols = [c for c in data.columns if c != 'win']
# return aggregated dataframe (which becomes a pandas series)
return data[mycols].agg(RMSE_)_____no_output_____I = d[cols_prW].notnull().all(1) # only run comparison on subsample where all odds were observed
x_ = RMSE_agg(d[cols_prW + ['win']], 'win');
ax = x_.plot.bar();
ax.set_ylim([x_.min()*.999, x_.max()*1.001]);
ax.set_ylabel('RMSE'); _____no_output_____
</code>
# Linear Probability Models
Estimate a bunch of models where $y_i = 1(\text{win})$. _____no_output_____## Using `numpy` _____no_output_____
<code>
d['home_'] = d.home.astype(float)_____no_output_____I = d[['home_', 'win'] + cols_prW].notnull().all(axis=1)
X = d.loc[I, ['home_'] + cols_prW].values
y = d.loc[I, 'win'].values.reshape(-1,1)
N = I.sum()
oo = np.ones((N,1))
X = np.hstack([oo, X])_____no_output_____betahat = np.linalg.inv(X.T @ X) @ X.T @ y_____no_output_____pd.DataFrame({'beta':betahat.flatten()}, index=['const', 'home'] + cols_prW)_____no_output_____
</code>
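A side note on the linear algebra above: forming $(X'X)^{-1}$ explicitly works for this well-conditioned problem, but `np.linalg.lstsq` (or `np.linalg.solve`) is the numerically safer idiom for the same least-squares fit. A minimal equivalent sketch, assuming the `X`, `y` and `betahat` arrays built above:_____no_output_____
<code>
betahat_lstsq, *_ = np.linalg.lstsq(X, y, rcond=None)
np.allclose(betahat, betahat_lstsq)   # the two solutions should agree up to numerical precision_____no_output_____
</code>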
## Using `statsmodels`
(Cheating, but faster...) _____no_output_____
<code>
reg_addition = ' + '.join(cols_prW)
model_string = f'win ~ {reg_addition} + home + team'_____no_output_____cols_all = cols_prW + ['win', 'home']
I = d[cols_all].notnull().all(1) # no missings in any variables used in the prediction model
Itrain = I & (d.date < '2015-01-01') # for estimating our prediction model
Iholdout = I & (d.date >= '2015-01-01') # for assessing the model fit _____no_output_____# run regression
r = smf.ols(model_string, d[Itrain]).fit()
yhat = r.predict(d[I]).to_frame('AMN_PrW')
d.loc[I, 'AMN_PrW'] = yhat
print('Estimates with Team FE')
r.params.loc[['home[T.True]'] + cols_prW].to_frame('Beta')Estimates with Team FE
</code>
### Plot estimates, $\hat{\beta}$_____no_output_____
<code>
ax = r.params.loc[cols_prW].plot.bar();
ax.set_ylabel('Coefficient (loading in optimal prediction)');
ax.set_xlabel('Betting firm prediction'); _____no_output_____
</code>
### Plot model fit out of sample: avg. 1(win) vs. avg. $\hat{y}$ _____no_output_____
<code>
# predicted win rates from all firms and our new predicted probability
cols = cols_prW + ['AMN_PrW'] _____no_output_____
</code>
**Home matches:** `home == True`_____no_output_____
<code>
x_ = d.loc[(d.win == 1.0) & (d.home == True) & (Iholdout == True), cols].mean()
ax = x_.plot(kind='bar');
ax.set_ylim([x_.min()*0.995, x_.max()*1.005]);
ax.set_title('Out of sample fit: won matches as Home');
ax.set_xlabel('Betting firm prediction');
ax.set_ylabel('Pr(win) (only won home matches)'); _____no_output_____
</code>
**Away matches:** `home == False`_____no_output_____
<code>
x_ = d.loc[(d.win == 1.0) & (d.home == False) & (Iholdout == True), cols].mean()
ax = x_.plot(kind='bar');
ax.set_ylim([x_.min()*0.995, x_.max()*1.005]);
ax.set_ylabel('Pr(win) (only won away matches)');
ax.set_title('Out of sample fit: won matches as Away'); _____no_output_____
</code>
### RMSE
(evaluated in the holdout sample, of course.)_____no_output_____
<code>
cols_ = cols_prW + ['AMN_PrW', 'win']
I = Iholdout & d[cols_].notnull().all(1) # only run comparison on subsample where all odds were observed
x_ = RMSE_agg(d.loc[I,cols_], y='win');
ax = x_.plot.bar();
ax.set_ylim([x_.min()*.999, x_.max()*1.001]);
ax.set_ylabel('RMSE (out of sample)'); _____no_output_____
</code>
| {
"repository": "AndersMunkN/PublicData",
"path": "Football/linear.ipynb",
"matched_keywords": [
"bwa"
],
"stars": 1,
"size": 177142,
"hexsha": "d0a1d24eaaa85e00fb936d5b59b78f103d09f490",
"max_line_length": 26538,
"avg_line_length": 225.3715012723,
"alphanum_fraction": 0.9085027831
} |
# Notebook from nihal-rao/deepchem
Path: examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb
# Tutorial Part 10: Exploring Quantum Chemistry with GDB1k_____no_output_____Most of the tutorials we've walked you through so far have focused on applications to the drug discovery realm, but DeepChem's tool suite works for molecular design problems generally. In this tutorial, we're going to walk through an example of how to train a simple molecular machine learning model for the task of predicting the atomization energy of a molecule. (Remember that the atomization energy is the energy required to form 1 mol of gaseous atoms from 1 mol of the molecule in its standard state under standard conditions).
## Colab
This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.
[](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb)
## Setup
To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment._____no_output_____
<code>
!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py
import conda_installer
conda_installer.install()
!/root/miniconda/bin/conda info -e % Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0
100 3489 100 3489 0 0 37923 0 --:--:-- --:--:-- --:--:-- 37923
!pip install --pre deepchem
import deepchem
deepchem.__version__Requirement already satisfied: deepchem in /usr/local/lib/python3.6/dist-packages (2.4.0rc1.dev20200805143010)
Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)
Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)
Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)
Requirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)
Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)
Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)
Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)
</code>
With our setup in place, let's do a few standard imports to get the ball rolling._____no_output_____
<code>
import os
import unittest
import numpy as np
import deepchem as dc
import numpy.random
from deepchem.utils.evaluate import Evaluator
from sklearn.ensemble import RandomForestRegressor
from sklearn.kernel_ridge import KernelRidge_____no_output_____
</code>
The next step is to load our dataset. We're using a small dataset we've prepared that's pulled out of the larger GDB benchmarks. The dataset contains the atomization energies for 1K small molecules._____no_output_____
<code>
tasks = ["atomization_energy"]
dataset_file = "../../datasets/gdb1k.sdf"
smiles_field = "smiles"
mol_field = "mol"_____no_output_____
</code>
We now need a way to transform molecules that is useful for prediction of atomization energy. This representation draws on foundational work [1] that represents a molecule's 3D electrostatic structure as a 2D matrix $C$ of distances scaled by charges, where the $ij$-th element is represented by the following charge structure.
$C_{ij} = \frac{q_i q_j}{r_{ij}^2}$
If you're observing carefully, you might ask, wait doesn't this mean that molecules with different numbers of atoms generate matrices of different sizes? In practice the trick to get around this is that the matrices are "zero-padded." That is, if you're making Coulomb matrices for a set of molecules, you pick a maximum number of atoms $N$, make the matrices $N\times N$ and set to zero all the extra entries for this molecule. (There are a couple of extra tricks done under the hood beyond this. Check out reference [1] or read the source code in DeepChem!)
DeepChem has a built in featurization class `dc.feat.CoulombMatrixEig` that can generate these featurizations for you._____no_output_____
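To make the zero-padding idea concrete before calling the built-in featurizer, here is a toy NumPy sketch (illustrative only, not the DeepChem implementation): a 3x3 matrix standing in for a 3-atom molecule, padded up to a common size of $N = 5$ atoms._____no_output_____
<code>
import numpy as np

N = 5                                            # chosen maximum number of atoms
small = np.arange(1.0, 10.0).reshape(3, 3)       # stand-in for a 3-atom Coulomb matrix
padded = np.zeros((N, N))
padded[:small.shape[0], :small.shape[1]] = small # extra rows/columns stay zero
print(padded)_____no_output_____
</code>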
<code>
featurizer = dc.feat.CoulombMatrixEig(23, remove_hydrogens=False)_____no_output_____
</code>
Note that in this case, we set the maximum number of atoms to $N = 23$. Let's now load our dataset file into DeepChem. As in the previous tutorials, we use a `Loader` class, in particular `dc.data.SDFLoader` to load our `.sdf` file into DeepChem. The following snippet shows how we do this:_____no_output_____
<code>
# loader = dc.data.SDFLoader(
# tasks=["atomization_energy"], smiles_field="smiles",
# featurizer=featurizer,
# mol_field="mol")
# dataset = loader.featurize(dataset_file)_____no_output_____
</code>
For the purposes of this tutorial, we're going to do a random split of the dataset into training, validation, and test. In general, this split is weak and will considerably overestimate the accuracy of our models, but for now in this simple tutorial isn't a bad place to get started._____no_output_____
<code>
# random_splitter = dc.splits.RandomSplitter()
# train_dataset, valid_dataset, test_dataset = random_splitter.train_valid_test_split(dataset)_____no_output_____
</code>
One issue that Coulomb matrix featurizations have is that the range of entries in the matrix $C$ can be large. The charge $q_1q_2/r^2$ term can range very widely. In general, a wide range of values for inputs can throw off learning for the neural network. For this, a common fix is to normalize the input values so that they fall into a more standard range. Recall that the normalization transform applies to each feature $X_i$ of datapoint $X$
$\hat{X_i} = \frac{X_i - \mu_i}{\sigma_i}$
where $\mu_i$ and $\sigma_i$ are the mean and standard deviation of the $i$-th feature. This transformation enables the learning to proceed smoothly. A second point is that the atomization energies also fall across a wide range. So we apply an analogous transformation normalization transformation to the output to scale the energies better. We use DeepChem's transformation API to make this happen:_____no_output_____
<code>
# transformers = [
# dc.trans.NormalizationTransformer(transform_X=True, dataset=train_dataset),
# dc.trans.NormalizationTransformer(transform_y=True, dataset=train_dataset)]
# for dataset in [train_dataset, valid_dataset, test_dataset]:
# for transformer in transformers:
# dataset = transformer.transform(dataset)_____no_output_____
</code>
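Since the DeepChem calls above are commented out in this copy of the notebook, here is a toy NumPy illustration of the same per-feature standardization (made-up array shapes, not the DeepChem API):_____no_output_____
<code>
import numpy as np

X = np.random.default_rng(0).normal(loc=50.0, scale=10.0, size=(100, 23))  # fake feature matrix
X_hat = (X - X.mean(axis=0)) / X.std(axis=0)                                # per-feature (X - mu) / sigma
print(X_hat.mean(axis=0).round(3)[:5], X_hat.std(axis=0).round(3)[:5])      # ~0 means, ~1 std devs_____no_output_____
</code>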
Now that we have the data cleanly transformed, let's do some simple machine learning. We'll start by constructing a random forest on top of the data. We'll use DeepChem's hyperparameter tuning module to do this._____no_output_____
<code>
# def rf_model_builder(model_params, model_dir):
# sklearn_model = RandomForestRegressor(**model_params)
# return dc.models.SklearnModel(sklearn_model, model_dir)
# params_dict = {
# "n_estimators": [10, 100],
# "max_features": ["auto", "sqrt", "log2", None],
# }
# metric = dc.metrics.Metric(dc.metrics.mean_absolute_error)
# optimizer = dc.hyper.HyperparamOpt(rf_model_builder)
# best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(
# params_dict, train_dataset, valid_dataset, transformers,
# metric=metric)_____no_output_____
</code>
Let's build one more model, a kernel ridge regression, on top of this raw data._____no_output_____
<code>
# def krr_model_builder(model_params, model_dir):
# sklearn_model = KernelRidge(**model_params)
# return dc.models.SklearnModel(sklearn_model, model_dir)
# params_dict = {
# "kernel": ["laplacian"],
# "alpha": [0.0001],
# "gamma": [0.0001]
# }
# metric = dc.metrics.Metric(dc.metrics.mean_absolute_error)
# optimizer = dc.hyper.HyperparamOpt(krr_model_builder)
# best_krr, best_krr_hyperparams, all_krr_results = optimizer.hyperparam_search(
# params_dict, train_dataset, valid_dataset, transformers,
# metric=metric)_____no_output_____
</code>
# Congratulations! Time to join the Community!
Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:
## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)
This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.
## Join the DeepChem Gitter
The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!
# Bibliography:
[1] https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.98.146401_____no_output_____
| {
"repository": "nihal-rao/deepchem",
"path": "examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb",
"matched_keywords": [
"STAR",
"drug discovery"
],
"stars": 1,
"size": 16409,
"hexsha": "d0a2b1a38773b7d10fa6b5be65d8c3a07eb9b685",
"max_line_length": 579,
"avg_line_length": 38.8838862559,
"alphanum_fraction": 0.5605460418
} |
# Notebook from chetanchawla/Intro-to-Astro-2021
Path: Week6_radvel_fitting/Key.ipynb
# Radial Velocity Orbit-fitting with RadVel
## Week 6, Intro-to-Astro 2021
### Written by Ruben Santana & Sarah Blunt, 2018
#### Updated by Joey Murphy, June 2020
#### Updated by Corey Beard, July 2021
## Background information
Radial velocity measurements tell us how the velocity of a star changes along the direction of our line of sight. These measurements are made using Doppler Spectroscopy, which looks at the spectrum of a star and measures shifts in known absorption lines. Here is a nice [GIF](https://polytechexo.files.wordpress.com/2011/12/spectro.gif) showing the movement of a star due to the presence of an orbiting planet, the shift in the stellar spectrum, and the corresponding radial velocity measurements.
This tutorial will cover a lot of new topics and build on ones we just learned. We don't have time to review all of them right now, so you're encouraged to read the following references before coming back to complete the tutorial as one of your weekly assignments.
- [Intro to the Radial Velocity Technique](http://exoplanets.astro.yale.edu/workshop/EPRV/Bibliography_files/Radial_Velocity.pdf) (focus on pgs. 1-6)
- [Intro to Periodograms](https://arxiv.org/pdf/1703.09824.pdf) (focus on pgs. 1-30)
- [Intro to Markov Chain Monte Carlo Methods](https://towardsdatascience.com/a-zero-math-introduction-to-markov-chain-monte-carlo-methods-dcba889e0c50) (link also found in the MCMC resources from the Bayesian fitting methods and MCMC tutorial)
## About this tutorial
In this tutorial, you will use the California Planet Search Python package [RadVel](https://github.com/California-Planet-Search/radvel) to characterize the exoplanets orbiting the star K2-24 (EPIC 203771098) using radial velocity measurements. This tutorial is a modification of the "[K2-24 Fitting & MCMC](https://github.com/California-Planet-Search/radvel/blob/master/docs/tutorials/K2-24_Fitting%2BMCMC.ipynb)" tutorial on the RadVel GitHub page.
There are several coding tasks for you to accomplish in this tutorial. Each task is indicated by a `#TODO` comment.
In this tutorial, you will:
- estimate planetary orbital periods using a periodogram
- perform a maximum likelihood orbit fit with RadVel
- create a residuals plot
- perform a Markov Chain Monte Carlo (MCMC) fit to characterize orbital parameter uncertainty
## Outline
1. RadVel Installation
2. Importing Data
3. Finding Periods
4. Defining and Initializing a Model
5. Maximum Likelihood Fitting
6. Residuals
7. MCMC_____no_output_____## 1. Installation
We will begin by making sure we have all the python packages needed for the tutorial. First, [install RadVel](http://radvel.readthedocs.io/en/latest/quickstartcli.html#installation) by typing:
`pip install radvel` at the command line. (Some warning messages may print out, but I (Corey) was able to install RadVel successfully in a new Anaconda environment using python=3.8.3.)
If you want to clone the entire RadVel GitHub repository for easy access to the RadVel source code, type:
`git clone https://github.com/California-Planet-Search/radvel.git`
If everything installed correctly, the following cell should run without errors. If you still see errors try restarting the kernel by using the tab above labeled **kernel >> restart**._____no_output_____
<code>
# allows us to see plots on the jupyter notebook
%matplotlib inline
# used to interact with operating system
import os
# models used by radvel for calculations, plotting, and model optimization
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy import optimize
# for corner plots
import corner
# for radial velocity analysis
import radvel
from radvel.plot import orbit_plots, mcmc_plots
# for periodogram
from astropy.stats import LombScargle
# sets font size for plots
matplotlib.rcParams['font.size'] = 18_____no_output_____
</code>
## 2. Importing and Plotting Data
When you installed RadVel, some .csv files were placed in a directory on your computer called `radvel.DATADIR`. Let's read this data into Python using pandas._____no_output_____
<code>
# import data
path = os.path.join(radvel.DATADIR,'epic203771098.csv') # path to data file
data = pd.read_csv(path, index_col=0) # read data into pandas DataFrame
print('Path to radvel.DATADIR: {}\n'.format(radvel.DATADIR))
print(data)
# Let's print out the column names of the pandas DataFrame you just created (`data`)
print(data.columns.values)
# TODO: print out the length of `data`
print(len(data))
Path to radvel.DATADIR: /Users/corey/anaconda3/envs/astroconda/radvel_example_data
errvel t vel
0 1.593725 2364.819580 6.959066
1 1.600745 2364.825101 5.017650
2 1.658815 2364.830703 13.811799
3 1.653224 2366.827579 1.151030
4 1.639095 2367.852646 9.389273
5 1.723691 2373.888150 -2.820614
6 1.907690 2374.852412 -0.772991
7 1.709263 2376.863820 -2.222920
8 1.838565 2377.866073 0.146115
9 1.649715 2378.834011 2.739558
10 1.862539 2380.930797 7.571228
11 1.681661 2382.886140 5.137920
12 1.901465 2383.823529 0.368812
13 1.689888 2384.799943 -1.480772
14 1.680251 2384.828991 -2.737295
15 1.718589 2384.839720 -5.682658
16 1.713852 2388.955960 -3.910203
17 1.644495 2395.857258 -5.635432
18 1.760751 2402.898756 3.635211
19 1.649973 2403.771319 3.538690
20 1.460209 2411.755697 -3.754068
21 1.782881 2412.794200 -0.111816
22 1.637531 2420.803019 0.112752
23 1.760237 2421.822804 -2.587854
24 1.656808 2422.742125 3.020029
25 1.982700 2429.761751 -13.033590
26 1.875608 2429.810230 -10.996779
27 1.702684 2432.732316 -12.064872
28 1.913379 2432.807239 -14.867229
29 1.929956 2457.716902 -1.308613
30 1.944350 2457.754804 -5.319776
31 1.617464 2465.710740 4.873121
['errvel' 't' 'vel']
32
# Let's plot time (data.t) vs radial velocity (data.vel) using matplotlib.pyplot
plt.plot(data.t, data.vel, 'o')
# Now, on a new figure, let's modify the plotting code so that it adds error
# bars (data.errvel) to each RV measurement
plt.figure()
plt.errorbar(data.t, data.vel, data.errvel, fmt='o')
plt.show()
plt.errorbar(data.t, data.vel, data.errvel, fmt='o',color='maroon')
# Add labels for the x- and y-axes of your plot (time is in days; radial velocity is in m/s)
plt.xlabel('Time [days]')
plt.ylabel('Velocity [m/s]')
plt.show()
# TODO: change the color of the data in your plot
# TODO: What do you notice about the data? Does it look like there is a planet signal?
# What orbital period would you estimate?
# Enter your answer in the triple quotes below.
"""
It definitely doesn't appear to be a pure sinusoid. This means there could be significant eccentricity, additional planets,
stellar activity, or any number of other possible explanations. The periods look to be on the order of ~10-20 days,
or so
"""_____no_output_____
</code>
## 3. Finding a Significant Period
_____no_output_____Now, we will find probable orbital periods using a Lomb-Scargle periodogram. Periodograms are created using a Fourier transform, which is a mathematical process that takes in continuous time-based data and decomposes it into a combination of functions with various frequencies, as seen in the image below. To build more intuition for how a Fourier transform works, checkout this useful [PhET simulation](https://phet.colorado.edu/en/simulation/fourier).

([wikipedia](https://upload.wikimedia.org/wikipedia/commons/6/61/FFT-Time-Frequency-View.png))
The graph on the left is the continuous data, which is analogous to our radial velocity data. The three sine waves behind the graphs are the functions that are added to produce a good fit to the original data. Finally, the graph on the right is the periodogram. It shows how much each contributing function's frequency contributes to the data model. The larger the peak in the graph, the more significant that frequency is in the data. We use this frequency to get an idea of periodic behavior in the data (e.g. the orbital period of an exoplanet). Now, we will calculate a periodogram and use it to give us an estimate of the period of the planet's orbit._____no_output_____
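To build some intuition first, the cell below is a small illustrative sketch that is not part of the original tutorial: it injects a sinusoid with a known 15-day period into noisy, unevenly sampled fake data and checks that the Lomb-Scargle periodogram recovers it. The variable names (`t_demo`, `rv_demo`, etc.) are made up for this example; the import mirrors the one used later in this notebook._____no_output_____
<code>
# Illustration only: recover a known, injected period with Lomb-Scargle
import numpy as np
from astropy.stats import LombScargle

rng = np.random.default_rng(42)
t_demo = np.sort(rng.uniform(0, 100, 60))        # 60 unevenly spaced epochs over ~100 days
true_period = 15.0                               # injected period (days)
rv_demo = 5.0 * np.sin(2 * np.pi * t_demo / true_period) + rng.normal(0, 1.0, t_demo.size)
err_demo = np.full(t_demo.size, 1.0)             # constant 1 m/s uncertainties
freq, pow_demo = LombScargle(t_demo, rv_demo, err_demo).autopower(maximum_frequency=1.0)
print("Recovered period: %.2f days" % (1.0 / freq[np.argmax(pow_demo)]))_____no_output_____
</code>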
<code>
def LombScarg(t,v,e,min_per=0.01,max_per=1000):
#Calculate Generalized Lomb-Scargle periodogram and window function
fmin = 1./max_per
fmax = 1./min_per
frequency, power = LombScargle(t, v, e).autopower(minimum_frequency=1/1000,maximum_frequency=1.,method='cython')
per = 1/frequency
#Identify strongest period.
in_window = np.zeros(len(per),dtype=bool)
for s in range(len(per)):
if per[s] > min_per and per[s] < max_per:
in_window[s] += 1
powmax = max(power[in_window])
imax = np.argmax(power[in_window])
fbest = frequency[in_window][imax]
perbest = 1./fbest
return per, power, perbest_____no_output_____minPer = 30 # min period to look for 1st planet (in days)
maxPer = 50 # max period to look for 1st planet (in days)
period, power, period1 = LombScarg(data.t, data.vel,data.errvel,min_per=minPer,max_per=maxPer)
plt.xlim(1,1000)
plt.axvline(period1,color='red',linestyle='--')
plt.semilogx(period,power)
plt.xlabel('Period (days)')
plt.ylabel('GLS Power')
plt.show()
# TODO: change the values of minPer and maxPer. How do the results change? Why? Type your answer
# between the triple quotes below.
"""
`minPer` and `maxPer` control the period range in which the nyquist searcher looks for significant peaks. Changing
them controls which period the searcher returns (it's returning the maximum peak in the allowable range).
"""_____no_output_____
</code>
## 4. Defining and Initializing Model_____no_output_____Let's define a function that we will use to initialize the ``radvel.Parameters`` and ``radvel.RVModel`` objects.
These will be our initial guesses of the planet parameters, based on the radial velocity measurements and periodogram shown above._____no_output_____
<code>
nplanets = 1 # number of planets
def initialize_model():
time_base = 2420.
params = radvel.Parameters(nplanets,basis='per tc secosw sesinw k')
params['per1'] = radvel.Parameter(value=period1) # Insert our guess for period of first planet (from periodogram)
params['tc1'] = radvel.Parameter(value=2080.) # guess for time of transit of 1st planet
params['secosw1'] = radvel.Parameter(value=0.0) # determines eccentricity (assuming circular orbit here)
params['sesinw1'] = radvel.Parameter(value=0.0) # determines eccentriciy (assuming circular orbit here)
params['k1'] = radvel.Parameter(value=3.) # radial velocity semi-amplitude
mod = radvel.RVModel(params, time_base=time_base)
mod.params['dvdt'] = radvel.Parameter(value=-0.02) # possible acceleration of star
mod.params['curv'] = radvel.Parameter(value=0.01) # possible curvature in long-term radial velocity trend
return mod
_____no_output_____
</code>
Fit the K2-24 RV data assuming circular orbits.
Set initial guesses for the parameters:_____no_output_____
<code>
mod = initialize_model() # model initialized
like = radvel.likelihood.RVLikelihood(mod, data.t, data.vel, data.errvel, '_HIRES') # initialize Likelihood object
# define initial guesses for instrument-related parameters
like.params['gamma_HIRES'] = radvel.Parameter(value=0.1) # zero-point radial velocity offset
like.params['jit_HIRES'] = radvel.Parameter(value=1.0) # white noise_____no_output_____
</code>
Plot the model with our initial parameter guesses:_____no_output_____
<code>
def plot_results(like):
fig = plt.figure(figsize=(12,4))
fig = plt.gcf()
fig.set_tight_layout(True)
plt.errorbar(
like.x, like.model(data.t.values)+like.residuals(),
yerr=like.yerr, fmt='o'
)
ti = np.linspace(data.t.iloc[0] - 5, data.t.iloc[-1] + 5,100) # time array for model
plt.plot(ti, like.model(ti))
plt.xlabel('Time')
plt.ylabel('RV')
plot_results(like)_____no_output_____
</code>
## 5. Maximum Likelihood fit_____no_output_____Well, that solution doesn't look very good! Let's optimize the parameters set to vary by maximizing the likelihood.
Initialize a ``radvel.Posterior`` object._____no_output_____
<code>
post = radvel.posterior.Posterior(like) # initialize radvel.Posterior object_____no_output_____
</code>
Choose which parameters to change or hold fixed during a fit. By default, all `radvel.Parameter` objects will vary, so you only have to worry about setting the ones you want to hold fixed._____no_output_____
<code>
post.likelihood.params['secosw1'].vary = False # set as false because we are assuming circular orbit
post.likelihood.params['sesinw1'].vary = False # set as false because we are assuming circular orbit
print(like)parameter value vary
per1 43.8487 True
tc1 2080 True
secosw1 0 False
sesinw1 0 False
k1 3 True
dvdt -0.02 True
curv 0.01 True
gamma_HIRES 0.1 True
jit_HIRES 1 True
</code>
Maximize the likelihood and print the updated posterior object_____no_output_____
<code>
res = optimize.minimize(
post.neglogprob_array, # objective function is negative log likelihood
post.get_vary_params(), # initial variable parameters
method='Powell', # Nelder-Mead also works
)
plot_results(like) # plot best fit model
print(post)parameter value vary
per1 49.0164 True
tc1 2080.57 True
secosw1 0 False
sesinw1 0 False
k1 3.81242 True
dvdt -0.0843784 True
curv 0.00152179 True
gamma_HIRES -4.20245 True
jit_HIRES 3.88509 True
Priors
------
</code>
RadVel comes equipped with some fancy ready-made plotting routines. Check this out!_____no_output_____
<code>
matplotlib.rcParams['font.size'] = 12
RVPlot = orbit_plots.MultipanelPlot(post)
RVPlot.plot_multipanel()
matplotlib.rcParams['font.size'] = 18NOTE: This version of radvel has been modified to NOT include jitters in errorbars.
</code>
## 6. Residuals and Repeat
Residuals are the difference of our data and our best-fit model.
Next, we will plot the residuals of our optimized model to see if there is a second planet in our data. When we look at the following residuals, we will see a sinusoidal shape, so another planet may be present! Thus, we will repeat the steps shown earlier (this time using the parameters from the maximum fit for the first planet)._____no_output_____
<code>
residuals1 = post.likelihood.residuals()
# Let's make a plot of data.time versus `residuals1`
plt.figure()
plt.scatter(data.t, residuals1)
plt.xlabel('time [MJD]')
plt.ylabel('RV [m/s]')
plt.show()
# TODO: What do you notice? What would you estimate the period
# of the other exoplanet in this system to be? Write your answer between the triple quotes below.
"""
These residuals appear to go up and down every ~20 days or so. This looks like a more convincing version of the
period we first observed in the original radial velocity data. It's still pretty hard to tell, though! I'm
happy we have algorithms to find orbital periods more effectively than the human eye can.
"""_____no_output_____
</code>
Let's repeat the above analysis with two planets!_____no_output_____
<code>
nyquist = 2 # maximum sampling rate
minPer = 20 # minimum period to look for 2nd planet
maxPer = 30 # max period to look for 2nd planet
# finding 2nd planet period
period, power, period2 = LombScarg(data.t, data.vel, data.errvel, min_per=minPer, max_per=maxPer) # finding possible periods for 2nd planet
period, power, period1 = LombScarg(data.t, data.vel,data.errvel,min_per=minPer,max_per=maxPer)
plt.xlim(1,1000)
plt.axvline(period2,color='red',linestyle='--')
plt.semilogx(period,power)
plt.show()
# TODO: why doesn't the periodogram return the period of the first planet? Write your answer between the triple
# quotes below.
"""
The period of the first planet is not in the allowed period range we specified (`minPer` to `maxPer`).
"""_____no_output_____
</code>
Repeat the RadVel analysis_____no_output_____
<code>
nplanets = 2 # number of planets
def initialize_model():
time_base = 2420
params = radvel.Parameters(nplanets,basis='per tc secosw sesinw k')
# 1st Planet
params['per1'] = post.params['per1'] # period of 1st planet
params['tc1'] = post.params['tc1'] # time transit of 1st planet
params['secosw1'] = post.params['secosw1'] # determines eccentricity (assuming circular orbit here)
params['sesinw1'] = post.params['sesinw1'] # determines eccentricity (assuming circular orbit here)
params['k1'] = post.params['k1'] # velocity semi-amplitude for 1st planet
# 2nd Planet
params['per2'] = radvel.Parameter(value=period2) # Insert our guess for period of second planet (from periodogram)
params['tc2'] = radvel.Parameter(value=2070.)
params['secosw2'] = radvel.Parameter(value=0.0)
params['sesinw2'] = radvel.Parameter(value=0.0)
params['k2'] = radvel.Parameter(value=1.1)
mod = radvel.RVModel(params, time_base=time_base)
mod.params['dvdt'] = radvel.Parameter(value=-0.02) # acceleration of star
mod.params['curv'] = radvel.Parameter(value=0.01) # curvature of radial velocity fit
return mod
_____no_output_____mod = initialize_model() # initialize radvel.RVModel object
like = radvel.likelihood.RVLikelihood(mod, data.t, data.vel, data.errvel, '_HIRES')
like.params['gamma_HIRES'] = radvel.Parameter(value=0.1)
like.params['jit_HIRES'] = radvel.Parameter(value=1.0)_____no_output_____like.params['secosw1'].vary = False # set as false because we are assuming circular orbit
like.params['sesinw1'].vary = False
like.params['secosw2'].vary = False # set as false because we are assuming circular orbit
like.params['sesinw2'].vary = False
print(like)parameter value vary
per1 49.0164 True
tc1 2080.57 True
secosw1 0 False
sesinw1 0 False
k1 3.81242 True
per2 20.5863 True
tc2 2070 True
secosw2 0 False
sesinw2 0 False
k2 1.1 True
dvdt -0.02 True
curv 0.01 True
gamma_HIRES 0.1 True
jit_HIRES 1 True
plot_results(like)_____no_output_____post = radvel.posterior.Posterior(like) # initialize radvel.Posterior object
res = optimize.minimize(
post.neglogprob_array, # objective function is negative log likelihood
post.get_vary_params(), # initial variable parameters
method='Powell', # Nelder-Mead also works
)
plot_results(like) # plot best fit model
print(post)parameter value vary
per1 48.4701 True
tc1 2083.1 True
secosw1 0 False
sesinw1 0 False
k1 4.39734 True
per2 21.126 True
tc2 2069.83 True
secosw2 0 False
sesinw2 0 False
k2 4.7912 True
dvdt -0.0630582 True
curv 0.00152013 True
gamma_HIRES -4.0217 True
jit_HIRES 2.27605 True
Priors
------
matplotlib.rcParams['font.size'] = 12
RVPlot = orbit_plots.MultipanelPlot(post)
RVPlot.plot_multipanel()
matplotlib.rcParams['font.size'] = 18NOTE: This version of radvel has been modified to NOT include jitters in errorbars.
residuals2 = post.likelihood.residuals()
# TODO: make a plot of data.time versus `residuals2`. What do you notice?
# TODO: try redoing the above analysis, but this time, allow the eccentricity parameters to vary during the fit.
# How does the fit change?
plt.figure()
plt.scatter(data.t, residuals2)
plt.xlabel('time [MJD]')
plt.ylabel('RV [ms$^{-1}$]')
# Here's the original residuals plot, for comparison purposes:
plt.figure()
plt.scatter(data.t, residuals1, color='red')
plt.xlabel('time [MJD]')
plt.ylabel('RV [ms$^{-1}$]')
"""
The residuals perhaps look a little more randomly distributed than before, but again it's pretty hard to tell
without a periodogram.
"""
"""
The easiest way to do this is to rerun the analysis, except whenever you see a line that says secosw1 = False,
or sesinw1 = False, or secosw2 = False, or sesinw2 = False, you change them to True.
Be careful not to let the model go too crazy with eccentricity, try giving them initial guesses of 0.1.
The planet RV signatures look more angular (less purely sinusoidal) now that they have a non-zero eccentricity.
The data appears to be better-fit by an eccentric orbit model (i.e. the planets probably do have non-negligible
eccentricities).
"""_____no_output_____
</code>
K2-24 only has two known exoplanets, so we will stop this part of our analysis here. However, when analyzing an uncharacterized star system, it's important to continue the analysis until we see no significant reduction in the residuals of the radial velocity. _____no_output_____# 7. Markov Chain Monte Carlo (MCMC)
After reading the intro to MCMC blog post at the beginning of this tutorial, you are an expert on MCMC! Write a 3-sentence introduction to this section yourself.
MCMC is a method of exploring the parameter space of probable orbits using random walks, i.e. randomly changing the parameters of the fit. MCMC is used to find the most probable orbital solution and to determine the uncertainty (error bars) in the fit. MCMC tells you the probability distributions of orbital parameters consistent with the data._____no_output_____
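As a toy illustration of the idea (this is not RadVel's actual sampler), the sketch below runs a simple Metropolis random walk on a made-up 1-D Gaussian posterior: propose a random step, then accept or reject it based on the ratio of posterior probabilities._____no_output_____
<code>
# Toy Metropolis random walk (illustration only, not RadVel's sampler)
import numpy as np

def log_prob(x, mu=0.0, sigma=1.0):
    # log of an (unnormalized) Gaussian "posterior"
    return -0.5 * ((x - mu) / sigma) ** 2

rng = np.random.default_rng(0)
chain = [0.0]
for _ in range(5000):
    proposal = chain[-1] + rng.normal(0, 0.5)                      # random step
    if np.log(rng.uniform()) < log_prob(proposal) - log_prob(chain[-1]):
        chain.append(proposal)                                     # accept the step
    else:
        chain.append(chain[-1])                                    # reject: stay put
chain = np.array(chain[1000:])                                     # discard burn-in
print("posterior mean ~ %.2f, std ~ %.2f" % (chain.mean(), chain.std()))_____no_output_____
</code>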
<code>
# TODO: edit the Markdown cell immediately above this one with a 3 sentence description of the MCMC method.
# What does MCMC do? Why do you think it is important to use MCMC to characterize uncertainties in radial
# velocity fits?_____no_output_____
</code>
Let's use RadVel to perform an MCMC fit:_____no_output_____
<code>
df = radvel.mcmc(post, nwalkers=50, nrun=1000)
# TODO: What type of data structure is `df`, the object returned by RadVel's MCMC method?
"""
It is a pandas dataframe
"""20000/400000 (5.0%) steps complete; Running 15855.08 steps/s; Mean acceptance rate = 48.2%; Min Auto Factor = 19; Max Auto Relative-Change = inf; Min Tz = 6614.6; Max G-R = 1.004
Discarding burn-in now that the chains are marginally well-mixed
400000/400000 (100.0%) steps complete; Running 15030.55 steps/s; Mean acceptance rate = 37.4%; Min Auto Factor = 39; Max Auto Relative-Change = 0.0724; Min Tz = 1595.8; Max G-R = 1.018
MCMC: WARNING: chains did not pass convergence tests. They are likely not well-mixed.
</code>
Make a fun plot!_____no_output_____
<code>
Corner = mcmc_plots.CornerPlot(post, df)
Corner.plot()
# TODO: There is a lot going on in this plot. What do you think the off-diagonal boxes are showing?
# What about the on-diagonal boxes? What is the median period of the first planet?
# What is the uncertainty on the period of the first planet? The second planet?
# TODO: Why do you think the uncertainties on the periods of planets b and c are different?
"""
The on-diagonal boxes show 1-dimensional (marginal) probability distributions over each of the parameters of the fit.
The off-diagonal boxes show 2-dimensional probability distributions (covariances) between pairs of parameters
(the box's row and column show the parameters it corresponds to).
The median period of the first planet (for my eccentric fit) is 52.56 days. The uncertainty is +0.08 days, -0.07 days
(this corresponds to a *68% confidence interval* of [52.49, 52.64] days.)
The median period of the second planet is 20.69 days, with an uncertainty of +/- 0.02 days.
The uncertainties of the two orbital periods are different because the period of the second planet is much better
constrained by the data than the period of the first planet. We see many periods of the second planet repeated
over the ~100 day dataset, but only ~2 periods of the first planet.
"""_____no_output_____
</code>
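The medians and 68% intervals quoted above can also be read directly off the posterior samples. Here is a short sketch, assuming the `df` returned by `radvel.mcmc` contains one column of samples per free parameter (e.g. `per1`):
<code>
# Sketch: median and 68% credible interval from the MCMC samples
# (assumes `df` holds a column of posterior samples named 'per1')
import numpy as np
lo, med, hi = np.percentile(df['per1'], [15.87, 50.0, 84.13])
print("per1 = %.2f +%.2f / -%.2f days" % (med, hi - med, med - lo))_____no_output_____
</code>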
| {
"repository": "chetanchawla/Intro-to-Astro-2021",
"path": "Week6_radvel_fitting/Key.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 108,
"size": 699232,
"hexsha": "d0a390d7fcb26cb542261cac9be601677594e100",
"max_line_length": 312992,
"avg_line_length": 591.5668358714,
"alphanum_fraction": 0.9423939408
} |
# Notebook from eddyvdaker/Systematic-Mapping-Review-Dataecosystems
Path: visualisation.ipynb
<code>
# Imports
import matplotlib.pyplot as plt
import json_____no_output_____# Load data from result files
results_file = './results/results_5.json'
summary_file = './results/summary.json'
results = json.load(open(results_file))['results']
summary = json.load(open(summary_file))
def autolabel(rects, label_pos=0):
"""
Generate labels to show values on top of bar charts
:param rects: <pyplot.object> The current pyplot figure
:param label_pos: <float> OR <int> The amount of offset compared to the height of the bar
"""
for rect in rects:
height = rect.get_height()
plt.text(rect.get_x() + rect.get_width()/2., height + label_pos, f'{int(height)}', ha='center', va='bottom')_____no_output_____# Visualization for number of articles per category
categories = summary['occurrences per category']
total_results = sum(categories.values())
categories.pop('Generic', None)
categories.pop('Not about data ecosystems', None)
categories.pop('Systematic Review', None)
total_categorized_results = sum(categories.values())
total_uncategorized_results = total_results - total_categorized_results
print(f'total: {total_results}\n' \
f'catogorized: {total_categorized_results}\n' \
f'uncategorized: {total_uncategorized_results}\n')
labels = list(categories.keys())
values = list(categories.values())
# Pie chart
plt.pie(values, labels=labels, autopct='%1.1f%%', startangle=230)
plt.axis('equal')
plt.show()
# Same data in bar chart form
fig = plt.bar(range(len(categories)), values, align='center')
autolabel(fig, -0.8)
plt.xticks(range(len(categories)), labels, rotation=45, ha='right')
plt.xlabel('Fields')
plt.ylabel('Studies published')
plt.show()
# Same charts but this time with the science fields combined
categories_combined = categories
categories_combined['Science'] += categories_combined.pop('Biology (science)')
categories_combined['Science'] += categories_combined.pop('Neuroscience')
labels_combined = list(categories_combined.keys())
values_combined = list(categories_combined.values())
plt.pie(values_combined, labels=labels_combined, autopct='%1.1f%%', startangle=90)
plt.axis('equal')
plt.show()
# Bar chart with science fields combined
fig = plt.bar(range(len(categories_combined)), values_combined, align='center')
autolabel(fig, -0.8)
plt.xticks(range(len(categories_combined)), labels_combined, rotation=45, ha='right')
plt.xlabel('Fields')
plt.ylabel('Studies published')
plt.show()total: 68
catogorized: 29
uncategorized: 39
# Visualization of the number of articles published per year
publish_years = {}
for result in results:
year = result['publish_date'][0:4]
if year in publish_years.keys():
publish_years[year] += 1
else:
publish_years.update({year: 1})
key_list = sorted(list(publish_years.keys()))
value_list = [publish_years[x] for x in key_list]
# It shows a drop in 2018 because the year has just started, which gives
# a misleading picture of the number of studies on the subject
fig = plt.bar(range(len(value_list)), value_list, align='center')
autolabel(fig, -1.5)
plt.xticks(range(len(key_list)), key_list, rotation=45, ha='right')
plt.xlabel('Publish year')
plt.ylabel('Studies published')
plt.show()
# Plot with 2018 removed from the results
key_list = key_list[:-1]
value_list = value_list[:-1]
fig = plt.bar(range(len(value_list)), value_list, align='center')
autolabel(fig, -1.5)
plt.xticks(range(len(key_list)), key_list, rotation=45, ha='right')
plt.xlabel('Publish year')
plt.ylabel('Studies published')
plt.show()_____no_output_____# Show the occurrences of each of the search terms
search_terms = summary['search terms']
labels = list(search_terms.keys())
values = list(search_terms.values())
fig = plt.bar(range(len(values)), values, align='center')
autolabel(fig, -3)
plt.xticks(range(len(labels)), labels, rotation=45, ha='right')
plt.show()_____no_output_____# Check quality criteria
in_title = []
in_abstract = []
term = 'data ecosystem'
for result in results:
if term in result['title'].lower():
in_title.append(result['id'])
if term in result['abstract'].lower():
in_abstract.append(result['id'])
print(f'Results with {term} in title: {in_title}')
print(f'Results with {term} in abstract: {in_abstract}')
in_both = [x for x in in_title if x in in_abstract]
print(f'\nResults with {term} in both title and abstract: {in_both}')
in_single = [x for x in in_abstract]
for result in in_title:
in_single.append(result)
in_single = sorted([x for x in in_single if x not in in_both])
print(f'\nResults with {term} only in either title or abstract: {in_single}')Results with data ecosystem in title: [0, 1, 2, 3, 4, 6, 7, 9, 10, 12, 13, 14, 15, 16, 17, 18, 19, 23, 25, 31, 50, 54, 56, 58]
Results with data ecosystem in abstract: [0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 18, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 51, 52, 53, 54, 55, 57, 59, 60, 61, 62, 63, 64, 65, 66, 67]
Results with data ecosystem in both title and abstract: [0, 1, 3, 6, 7, 9, 10, 12, 13, 14, 16, 18, 23, 25, 54]
Results with data ecosystem only in either title or abstract: [2, 4, 5, 8, 11, 15, 17, 19, 20, 21, 22, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67]
</code>
| {
"repository": "eddyvdaker/Systematic-Mapping-Review-Dataecosystems",
"path": "visualisation.ipynb",
"matched_keywords": [
"biology",
"neuroscience"
],
"stars": null,
"size": 133335,
"hexsha": "d0a3d4fd1b62a64de052b6913e0b07e3fe368cc3",
"max_line_length": 29018,
"avg_line_length": 427.3557692308,
"alphanum_fraction": 0.9291408857
} |
# Notebook from surya2365/ds-seed
Path: Regression/Linear Models/ElasticNet_RobustScaler.ipynb
# ElasticNet with RobustScaler
_____no_output_____**This code template is for regression analysis using an ElasticNet regression and the feature rescaling technique RobustScaler in a pipeline.**_____no_output_____### Required Packages_____no_output_____
<code>
import warnings as wr
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.preprocessing import LabelEncoder
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import RobustScaler
from sklearn.model_selection import train_test_split
from sklearn.linear_model import ElasticNet
from sklearn.metrics import mean_squared_error, r2_score,mean_absolute_error
wr.filterwarnings('ignore')_____no_output_____
</code>
### Initialization
Filepath of CSV file_____no_output_____
<code>
#filepath
file_path= ""_____no_output_____
</code>
List of features which are required for model training._____no_output_____
<code>
#x_values
features=[]_____no_output_____
</code>
Target feature for prediction._____no_output_____
<code>
#y_value
target=''_____no_output_____
</code>
### Data Fetching
Pandas is an open-source, BSD-licensed library providing high-performance, easy-to-use data manipulation and data analysis tools.
We will use the pandas library to read the CSV file using its storage path, and the head function to display the initial entries._____no_output_____
<code>
df=pd.read_csv(file_path) #reading file
df.head()#displaying initial entries_____no_output_____print('Number of rows are :',df.shape[0], ',and number of columns are :',df.shape[1])Number of rows are : 1338 ,and number of columns are : 7
df.columns.tolist()
_____no_output_____
</code>
### Data Preprocessing
Since the majority of the machine learning models in the scikit-learn library don't handle string categorical data or null values, we have to explicitly remove or replace null values. The snippet below defines functions that remove null values, if any exist, and convert string class data in the dataset by encoding it into integer classes.
_____no_output_____
<code>
def NullClearner(df):
if(isinstance(df, pd.Series) and (df.dtype in ["float64","int64"])):
df.fillna(df.mean(),inplace=True)
return df
elif(isinstance(df, pd.Series)):
df.fillna(df.mode()[0],inplace=True)
return df
else:return df
def EncodeX(df):
return pd.get_dummies(df)_____no_output_____
</code>
#### Correlation Map
In order to check the correlation between the features, we will plot a correlation matrix. It is effective in summarizing a large amount of data where the goal is to see patterns._____no_output_____
<code>
plt.figure(figsize = (15, 10))
corr = df.corr()
mask = np.triu(np.ones_like(corr, dtype = bool))
sns.heatmap(corr, mask = mask, linewidths = 1, annot = True, fmt = ".2f")
plt.show()_____no_output_____correlation = df[df.columns[1:]].corr()[target][:]
correlation_____no_output_____
</code>
### Feature Selections
Feature selection is the process of reducing the number of input variables when developing a predictive model, both to reduce the computational cost of modelling and, in some cases, to improve the performance of the model.
We will assign all the required input features to X and target/outcome to Y._____no_output_____
<code>
#spliting data into X(features) and Y(Target)
X=df[features]
Y=df[target] _____no_output_____
</code>
Calling preprocessing functions on the feature and target set._____no_output_____
<code>
x=X.columns.to_list()
for i in x:
X[i]=NullClearner(X[i])
X=EncodeX(X)
Y=NullClearner(Y)
X.head()_____no_output_____
</code>
### Data Splitting
The train-test split is a procedure for evaluating the performance of an algorithm. The procedure involves taking a dataset and dividing it into two subsets. The first subset is utilized to fit/train the model. The second subset is used for prediction. The main purpose is to estimate the performance of the model on new data._____no_output_____
<code>
# we can choose random_state and test_size as per our requirement
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size = 0.2, random_state = 1) #performing datasplitting_____no_output_____
</code>
## Model
### Data Scaling
**Used RobustScaler**
* It scales features using statistics that are robust to outliers.
* This method removes the median and scales the data by the interquartile range (IQR), i.e. the range between the 1st quartile (25th percentile) and the 3rd quartile (75th percentile); the exact transform is written out below, after the tuning-parameter list.
### ElasticNet
Elastic Net first emerged as a result of critique on Lasso, whose variable selection can be too dependent on data and thus unstable. The solution is to combine the penalties of Ridge regression and Lasso to get the best of both worlds.
**Features of ElasticNet Regression-**
* It combines the L1 and L2 approaches.
* It performs a more efficient regularization process.
* It has two parameters to be set, λ and α.
#### Model Tuning Parameters
1. alpha : float, default=1.0
> Constant that multiplies the penalty terms. Defaults to 1.0. See the notes for the exact mathematical meaning of this parameter. alpha = 0 is equivalent to an ordinary least square, solved by the LinearRegression object. For numerical reasons, using alpha = 0 with the Lasso object is not advised. Given this, you should use the LinearRegression object.
2. l1_ratio : float, default=0.5
> The ElasticNet mixing parameter, with 0 <= l1_ratio <= 1. For l1_ratio = 0 the penalty is an L2 penalty. For l1_ratio = 1 it is an L1 penalty. For 0 < l1_ratio < 1, the penalty is a combination of L1 and L2.
3. normalize : bool, default=False
>This parameter is ignored when fit_intercept is set to False. If True, the regressors X will be normalized before regression by subtracting the mean and dividing by the l2-norm. If you wish to standardize, please use StandardScaler before calling fit on an estimator with normalize=False.
4. max_iter : int, default=1000
>The maximum number of iterations.
5. tol : float, default=1e-4
>The tolerance for the optimization: if the updates are smaller than tol, the optimization code checks the dual gap for optimality and continues until it is smaller than tol.
6. selection : {‘cyclic’, ‘random’}, default=’cyclic’
>If set to ‘random’, a random coefficient is updated every iteration rather than looping over features sequentially by default. This (setting to ‘random’) often leads to significantly faster convergence especially when tol is higher than 1e-4.
_____no_output_____
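For reference, the two pieces of the pipeline described above can be written out explicitly (standard formulas; the ElasticNet objective is given in scikit-learn's parameterisation, where `alpha` corresponds to α and `l1_ratio` to ρ):

$$x' = \frac{x - \mathrm{median}(x)}{Q_{3}(x) - Q_{1}(x)} \qquad \text{(RobustScaler)}$$

$$\min_{w}\ \frac{1}{2n}\lVert y - Xw\rVert_2^2 + \alpha\,\rho\,\lVert w\rVert_1 + \frac{\alpha(1-\rho)}{2}\lVert w\rVert_2^2 \qquad \text{(ElasticNet)}$$_____no_output_____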
<code>
#training the ElasticNet
Input=[("scaler",RobustScaler()),("model",ElasticNet(random_state = 5))]
model = Pipeline(Input)
model.fit(X_train,y_train)_____no_output_____
</code>
#### Model Accuracy
For a regression pipeline, the score() method returns the coefficient of determination (R²) of the prediction on the given test data and labels, so the value printed below is R² expressed as a percentage rather than a classification accuracy._____no_output_____
<code>
print("Accuracy score {:.2f} %\n".format(model.score(X_test,y_test)*100))Accuracy score 48.36 %
#prediction on testing set
prediction=model.predict(X_test)_____no_output_____
</code>
### Model evaluation
**r2_score:** The r2_score function computes the proportion of the variability in the target that is explained by our model.
**MAE:** The mean absolute error function calculates the average absolute difference between the real data and the predicted data.
**MSE:** The mean squared error function averages the squared errors, penalizing the model more heavily for large errors._____no_output_____
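Written out explicitly (standard definitions, with $y_i$ the true values, $\hat{y}_i$ the predictions and $\bar{y}$ the mean of the true values):

$$\mathrm{MAE}=\frac{1}{n}\sum_{i=1}^{n}\lvert y_i-\hat{y}_i\rvert \qquad \mathrm{MSE}=\frac{1}{n}\sum_{i=1}^{n}(y_i-\hat{y}_i)^2 \qquad R^2 = 1-\frac{\sum_i (y_i-\hat{y}_i)^2}{\sum_i (y_i-\bar{y})^2}$$_____no_output_____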
<code>
print('Mean Absolute Error:', mean_absolute_error(y_test, prediction))
print('Mean Squared Error:', mean_squared_error(y_test, prediction))
print('Root Mean Squared Error:', np.sqrt(mean_squared_error(y_test, prediction)))Mean Absolute Error: 6195.156634885755
Mean Squared Error: 77088496.26589973
Root Mean Squared Error: 8780.00548211103
print("R-squared score : ",r2_score(y_test,prediction))R-squared score : 0.48360017436142344
# plotting actual vs predicted values
red = plt.scatter(np.arange(0,80,5),prediction[0:80:5],color = "red")
green = plt.scatter(np.arange(0,80,5),y_test[0:80:5],color = "green")
plt.title("Comparison of Regression Algorithms")
plt.xlabel("Index of Candidate")
plt.ylabel("target")
plt.legend((red,green),('ElasticNet', 'REAL'))
plt.show()
_____no_output_____
</code>
### Prediction Plot
Finally, we plot the actual target values and the model's predictions for the first 20 test records on the same axes, so we can visually compare how closely the predictions track the true values._____no_output_____
<code>
plt.figure(figsize=(10,6))
plt.plot(range(20),y_test[0:20], color = "green")
plt.plot(range(20),model.predict(X_test[0:20]), color = "red")
plt.legend(["Actual","prediction"])
plt.title("Predicted vs True Value")
plt.xlabel("Record number")
plt.ylabel(target)
plt.show()_____no_output_____
</code>
#### Creator: Vipin Kumar , Github: [Profile](https://github.com/devVipin01)_____no_output_____
| {
"repository": "surya2365/ds-seed",
"path": "Regression/Linear Models/ElasticNet_RobustScaler.ipynb",
"matched_keywords": [
"evolution"
],
"stars": null,
"size": 104890,
"hexsha": "d0a3ed150320ba75b8159144d6be867678f2ab17",
"max_line_length": 48845,
"avg_line_length": 131.2765957447,
"alphanum_fraction": 0.8553913624
} |
# Notebook from MultiplexDX/B117-RT-qPCR-design
Path: B1117.ipynb
### B.1.1.7
#### The selection of suitable loci (done on Tue 22. Dec. 2020)
To identify suitable targets for primer/probe design, we downloaded 1,136 sequences from the GISAID repository filtered during a collection time spanning 1 - 21 December 2020. We focused on the spike gene because lineage B.1.1.7 contains a number of spike gene mutations, including two deletions (ΔH69/ΔV70 and ΔY144) that we focused on for designing a specific assay.
I cut the locus encoding the spike protein and used the *MAFFT* alignment tool (with the parameter --auto) to align all the sequences against the WUHAN reference (NCBI ID: NC_045512.2). _____no_output_____
<code>
%%bash
# "msa_1221.fasta" is a pre-filtered nucleotide MSA file downladed from the GISAID repository 22.12.2020
# the WUHAN reference is always used as the first sequence in the GISAID MSA files
grep -m 1 ">" msa_1221.fasta | cut -d">" -f2 > sars2_allSeqs_til21stDec2020_andRefWuhan.list
# to reduce computational time, I used only sequences collected in Dec 2020
# getting unique sequence IDs
grep -P "2020-12-" msa_1221.fasta | cut -d">" -f2 >> sars2_allSeqs_til21stDec2020_andRefWuhan.list
# start-end positions of the spike protein in the aligned WUHAN sequence: 22412-26369
# I called the spike locus of all sequences listed in "sars2_allSeqs_til21stDec2020_andRefWuhan.list"
count=$(wc -l sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d" " -f1)
for ((i=1; i<$(($count+1)); i++))
do
ID=$(sed -n ''$i'p' sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d" " -f1)
echo ">"$ID >> sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa
grep -A 1 -m 1 $ID msa_1221.fasta | grep -v ">" | cut -c22412-26369 | tr -d '-' | tr -d '\n' | tr -d ' ' >> sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa
echo "" >> sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa
done
# using 4 CPUS, I run the mafft tool with default settings
mafft --thread 4 --auto sars2_allSeqs_til21stDec2020_andRefWuhan_Spike.fa > sars2_allSeqs_til21stDec2020_andRefWuhan_Spike_mafft.fa_____no_output_____
</code>
#### Downstream analysis
Twelve sequences (1.06 %) contained ambiguous signal in the loci of deletions and were not used in the downstream analysis. We separated sequences into two groups: 1) those with the ΔH69/ΔV70 and ΔY144 deletions and 2) those without the deletions (Table 1). Using *SeaView*, we called 95 % consensus sequences for the ΔH69/ΔV70 and ΔY144 group and the No deletions group that were used to design primer and probe sets specific to either B.1.1.7 or all other SARS-CoV-2 variants, respectively. _____no_output_____
<code>
%%bash
# quality checks of bases in the deleted loci (ΔH69/ΔV70 and ΔY144)
# if a called base has an ambiguous character, it is denoted as N
count=$(wc -l sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d" " -f1)
for ((i=1; i<$(($count+1)); i++))
do
ID=$(sed -n ''$i'p' sars2_allSeqs_til21stDec2020_andRefWuhan.list | cut -d"|" -f2)
Del69_70=$(awk ' BEGIN {RS=">"}; /'$ID'\|/ { print ">"$0 } ' sars2_allSeqs_til21stDec2020_andRefWuhan_Spike_mafft.fa | grep -v ">" | tr -d '\n' | tr -d ' ' | cut -c203-208)
Del144=$(awk ' BEGIN {RS=">"}; /'$ID'\|/ { print ">"$0 } ' sars2_allSeqs_til21stDec2020_andRefWuhan_Spike_mafft.fa | grep -v ">" | tr -d '\n' | tr -d ' ' | cut -c428-430)
# using the output file, we can also compute the correlation of the two deletions (ΔH69/ΔV70 and ΔY144) and judge their co-occurrence
echo -e $ID"\t"$Del69_70"\t"$Del144 >> sars2_1stDec20202_21stDec20202_Spike_Qchecks.tsv
done
# The shorter deletion (ΔY144) always co-occurred with the longer deletion (ΔH69/ΔV70), whereas the (ΔH69/ΔV70) deletion occurs independently in 17 sequences (1.5 %).
# Pearson's correlation coefficient of the deletions is 0.953. _____no_output_____
</code>
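The co-occurrence statistics quoted in the comments above (e.g. Pearson's r ≈ 0.953) can be re-derived from the generated TSV with a short Python snippet like the one below. This is only a sketch and was not part of the original pipeline: it assumes the three-column layout written by the loop above and treats a locus as deleted when its aligned window consists of gap characters._____no_output_____
<code>
# Sketch: correlation of the two deletions from the QC table produced above
import pandas as pd
qc = pd.read_csv("sars2_1stDec20202_21stDec20202_Spike_Qchecks.tsv", sep="\t",
                 header=None, names=["seq_id", "locus_69_70", "locus_144"])
del_69_70 = qc["locus_69_70"].str.count("-").ge(6).astype(int)  # 6-nt window fully gapped
del_144 = qc["locus_144"].str.count("-").ge(3).astype(int)      # 3-nt window fully gapped
print(del_69_70.corr(del_144))  # Pearson correlation of the two deletion indicators_____no_output_____
</code>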
#### Quality checks of the selected primer/probe loci (done on Thu 4. Febr. 2021)
In a separate analysis to determine the prevalence of the ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7, we downloaded 416,778 spike protein sequences with the most recent data description file collected from the beginning of the pandemic through 29 January 2021. Using regular expressions (bash pattern matching command grep with the option -P for Perl-compatible regular expression), we searched for loci with both ΔH69/ΔV70 and ΔY144 deletions, and for loci without these deletions. In the regular expression, we kept fixed a few amino acids downstream and upstream from the deletions to omit any miscalling of the searched pattern.
#### Quality checks of the selected primer/probe loci (update: 2. March 2021)
In a separate analysis to determine the prevalence of the ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7, we downloaded 633,137 spike protein sequences with the most recent data description file collected from the beginning of the pandemic through 2 March 2021. Using regular expressions (bash pattern matching command grep with the option -P for Perl-compatible regular expression), we searched for loci with both ΔH69/ΔV70 and ΔY144 deletions and for loci without these deletions. In the regular expression, we kept fixed a few amino acids downstream and upstream from the deletions to omit any miscalling of the searched pattern. _____no_output_____
<code>
# update for the data; datasets from 1st of March 2021 (download on Tue 2nd March 2021)
%%bash
# "Spike_proteins_0301.fasta" is a pre-filtered amino-acid MSA file downladed from the GISAID repository 22.12.2020
grep -c ">" Spike_proteins_0301.fasta
# detection of both deletions (no HV, no Y): d69d70 and d144; with the check for unique sequence IDs
grep -B1 -P "HAISGT.{66}FLGVYHK" Spike_proteins_0301.fasta | grep ">" | cut -d"/" -f2 | sort | uniq -c | awk ' { print $1"\t"$2} ' | wc -l
# full pattern (HV and Y), no deletion; with the check for unique sequence IDs
grep -B1 -P "HAIHVSGT.{66}FLGVYYHK" Spike_proteins_0301.fasta | grep ">" | cut -d"/" -f2 | sort | uniq -c | awk ' { print $1"\t"$2} ' | wc -l
# only d144 (only HV, no Y); with the check for unique sequence IDs
grep -B1 -P "HAIHVSGT.{66}FLGVYHK" Spike_proteins_0301.fasta | grep ">" | cut -d"/" -f2 | sort | uniq -c | awk ' { print $1"\t"$2} ' | wc -l
# only d69d70 (only Y, no HV); with the check for unique sequence IDs
grep -B1 -P "HAISGT.{66}FLGVYYHK" Spike_proteins_0301.fasta | grep ">" | cut -d"/" -f2 | sort | uniq -c | awk ' { print $1"\t"$2} ' | wc -l
# detection of both deletions (no HV, no Y): d69d70 and d144; with the time-dependent sorting
grep -B1 -P "HAISGT.{66}FLGVYHK" Spike_proteins_0301.fasta | grep ">" | grep -oP "\|202[01]-..-" | sort | uniq -c
# only d69d70 (only Y, no HV); with the time-dependent sorting
grep -B1 -P "HAISGT.{66}FLGVYYHK" Spike_proteins_0301.fasta | grep ">" | grep -oP "\|202[01]-..-" | sort | uniq -c
# call the whole metadata information about sars-cov-2 records with detected both deletions
count=$(wc -l Spike_proteins_0301.fasta | cut -d" " -f1)
echo $count
# $count/16=6473
myF(){
for ((i=1; i<6474; i++))
do
N=$((12946*$1 + $i))
ID=$(sed -n ''$N'p' B117_IDs.list)
# metadata_2021-03-01_09-16.tsv
awk -v ID=$ID 'BEGIN{FS="\t"}; { if ( $3 == ID && $15 == "Human" ) { print $1"\t"$3"\t"$7"\t"$18"\t"$19 }} ' metadata_2021-03-01_09-16.tsv >> "B117_search_"$i.csv
done
}
export -f myF
# 12 946
parallel -j 16 myF ::: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
cat "B117_search_"*.csv > "B117_detected_bothMutations_cladeID.csv
rm "B117_search_"*.csv_____no_output_____
</code>
#### Results (done on Thu 4. Febr. 2021)
Our analysis of the prevalence of both ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7 revealed a total of 29,872 sequences that possess both deletions, while 368,474 sequences do not have them. Based on the metadata file, we identified SARS-CoV-2 lineages across all called sequences with both deletions. Only five sequences (0.0167 %) out of 29,872 records are not labelled as B.1.1.7, highlighting the notion that these two deletions are highly specific for the B.1.1.7 variant and make ideal targets for primer/probe design.
#### Results (update: 2. March 2021)
Analysis of the prevalence of both ΔH69/ΔV70 and ΔY144 deletions in lineages other than B.1.1.7 revealed a total of 103,529 sequences that possess both deletions. Based on the metadata file, we identified SARS-CoV-2 lineages across all called sequences with both deletions. Only 108 sequences (0.10%) out of 103,529 sequences are not labelled as B.1.1.7. In other words, 99.90% of sequences containing both deletions belong to lineage B.1.1.7, highlighting the notion that these two deletions are highly specific for the B.1.1.7 variant and make ideal targets for primer/probe design (see the table below, please).
| Clade (Nextstrain) | Total sequences containing both ΔH69/ΔV70 and ΔY144 | % sequences containing both ΔH69/ΔV70 and ΔY144 |
|:------------------:|:---------------------------------------------------:|:-----------------------------------------------:|
|19A|6|<0.01%|
|20A|36|0.03%|
|20A.EU2|22|0.02%|
|20B|21|0.02%|
|20C|6|<0.01%|
|20E.EU1|13|0.01%|
|20I/501Y.V1 (**B.1.1.7**)|103,421|**99.90%**|
|No ID|4|<0.01%|
|Total|103,529|100%|
_____no_output_____
| {
"repository": "MultiplexDX/B117-RT-qPCR-design",
"path": "B1117.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 11523,
"hexsha": "d0a4b9db6e193b996540f6f44ec78222c5b7d19f",
"max_line_length": 660,
"avg_line_length": 57.615,
"alphanum_fraction": 0.6359455003
} |
# Notebook from athenian-ct-projects/Concert-Prep-Day-JL
Path: Concert_Prep_artist_monopoly_JL.ipynb
<a href="https://colab.research.google.com/github/athenian-ct-projects/Concert-Prep-Day-JL/blob/master/Concert_Prep_artist_monopoly_JL.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____Artist-themed Monopoly for Concert Prep Day
Jack L. '23_____no_output_____
<code>
#art fact function
def fact():
import random
x = random.randint(1,10)
if x == 1:
print("Bob Ross's 'Joy of Painting' TV series lasted for 31 seasons")
if x == 2:
print("In 1911 when the Mona Lisa was stolen from the Louvre, Pablo Picasso was one of the two primary suspects in the investigation \nbefore it was found out that an employee did it")
if x == 3:
print("Salvador Dalí thinks that he is a re-incarnation of his brother that died before he was born")
if x == 4:
print("Vincent van Gogh only ever sold one painting in his life")
if x == 5:
print("'The Last Supper' by Leonardo da Vinci originally featured Jesus's feet, but in 1652 when installing a door in the refectory where the painting is, \nthe feet were cut off")
if x == 6:
print("Vincent van Gogh's painting 'The Starry Night' is the view from a psychiatric hospital in France where van Gogh was staying when he painted it")
if x == 7:
print("The marble that was used for Michelangelo's 'David' was used by two other sculptors before Michelangelo")
if x == 8:
print("There are five versions of Edvard Munch’s 'The Scream'")
if x == 9:
print("Auguste Rodin’s 'The Thinker' originally was only 70cm until he later made an enlarged version")
if x == 10:
print("Andy Warhol's Campbell's Soup cans came in a set of thirty-two cans")
#Rainbow paint bucket function
def paint():
import random
y = random.randint(1,10)
if y == 1:
print("HOORAY, advance to go collect $200")
if y == 2:
print("You commited tax fraud - go to jail. If you pass go do not collect $200.")
if y == 3:
print("You are a guest star on a game show. Collect $100 from the bank.")
if y == 4:
print("You drink a Sprite cranbery. Suddenly your door falls down and Lebron James walks in and hands you a fat stack of cash. Collect $500 from the bank.")
if y == 5:
print("Some guy blows up your house with a grenade launcher like in John Wick 2. Pay the bank $200.")
if y == 6:
print("The Great Depression happens again and your bank fails. Pay the bank all of your money (you can mortgage your artists to avoid going bankrupt).")
if y == 7:
print("You get in a car crash while wearng a VR headset and playing a flight simulator in the car, saying 'it will be like I am flying in a plane'. \nPay the bank $200 in medical fees")
if y == 8:
print("Your grandfather dies and he leaves you an inheritance. You assume his massive debt and pay the bank $500.")
if y == 9:
print("Your favorite NFL team wins the Super Bowl! Pay the bank $50 for the jersey you bought.")
if y == 10:
print("You win the lottery but spend it all on worthless stuff. Roll the dice again")
#Instructions
print("Welcome to Artist Monopoly!")
print("This is just like regular monopoly but with some IMPORTANT twists:")
print("To roll dice just ask siri to roll a pair of dice")
print("*there are more spaces, and railroads have been replaced with more modern airlines")
print("*there are auction spaces now. If you land on one you can buy any artist on the board but you have to pay double (only one artist each time you land on the spot).")
print("*trading artists for money and other artists are encoureged but you can only propose a trade on your turn.")
print("*chance spaces have been replaced by artist facts. If you land on that space, type the word 'fact' into the computer.")
print("*community chests have been replaced by rainbow paint buckets. If you land on that space, type the word 'paint' into the computer.\n")
print("IMPORTANT: When someone goes bankrupt, type the word 'player' into the computer.\n")
player = int(input("How many people are playing today? "))
print("Alright you're ready to play. Everyone starts with $1500")
#Gameplay while loop
while player > 1:
tip = input()
if tip == "fact":
fact()
elif tip == "paint":
paint()
elif tip == "player":
player = player - 1
else:
print("You must have spelled somthing wrong. Try again.")
#Final score calculating
print("Looks like we have a winner! Now lets calculate your final score.")
pig = int(input("How much money did the winner have in the end? "))
fig = 0
for z in range(1,pig):
fig = fig + z
print("The final score of the winner is:")
print(fig)
print("Thanks for playing!")Welcome to Artist Monopoly!
This is just like regular monopoly but with some IMPORTANT twists:
To roll dice just ask siri to roll a pair of dice
*there are more spaces, and railroads have been replaced with more modern airlines
*there are auction spaces now. If you land on one you can buy any artist on the board but you have to pay double (only one artist each time you land on the spot).
*trading artists for money and other artists are encouraged but you can only propose a trade on your turn.
*chance spaces have been replaced by artist facts. If you land on that space, type the word 'fact' into the computer.
*community chests have been replaced by rainbow paint buckets. If you land on that space, type the word 'paint' into the computer.
IMPORTANT: When someone goes bankrupt, type the word 'player' into the computer.
</code>
https://www.bocadolobo.com/blog/art/famous-artists-time/
https://www.mentalfloss.com/article/5838715-things-you-didnt-know-about-famous-art
https://medium.com/@inna_13021/taxes-youll-have-to-pay-when-purchasing-or-selling-art-a418b958c457_____no_output_____
| {
"repository": "athenian-ct-projects/Concert-Prep-Day-JL",
"path": "Concert_Prep_artist_monopoly_JL.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 8576,
"hexsha": "d0a71daa81895ef9de8d99c03fa5b6c2d4ee2cfe",
"max_line_length": 270,
"avg_line_length": 50.4470588235,
"alphanum_fraction": 0.5552705224
} |
# Notebook from tejatammali/umd.inst414
Path: Module03/04-Dimensionality.SVD.ipynb
# Dimensionality Reduction Example
Using the IMDB data, we build a feature matrix and apply dimensionality reduction to this matrix via PCA and SVD._____no_output_____
<code>
%matplotlib inline_____no_output_____import json
import random
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.sparse import lil_matrix
from sklearn.neighbors import DistanceMetric
from sklearn.metrics import jaccard_score
from sklearn.metrics import pairwise_distances_____no_output_____# Let's restrict ourselves just to US titles
relevant_title_df = pd.read_csv("../data/us_relevant_titles.csv")
# And create a set of just these titles, so we can filter them
relevant_title_set = set(relevant_title_df["title"])_____no_output_____actor_id_to_name_map = {} # Map Actor IDs to actor names
actor_id_to_index_map = {} # Map actor IDs to a unique index of known actors
index_to_actor_ids = [] # Array mapping unique index back to actor ID (invert of actor_id_to_index_map)
index_counter = 0 # Unique actor index; increment for each new actor
known_actors = set()
movie_actor_list = [] # List of all our movies and their actors
test_count = 0
with open("../data/imdb_recent_movies.json", "r") as in_file:
for line in in_file:
this_movie = json.loads(line)
# Restrict to American movies
if this_movie["title_name"] not in relevant_title_set:
continue
# Keep track of all the actors in this movie
for actor_id,actor_name in zip(this_movie['actor_ids'],this_movie['actor_names']):
# Keep names and IDs
actor_id_to_name_map[actor_id] = actor_name
# If we've seen this actor before, skip...
if actor_id in known_actors:
continue
# ... Otherwise, add to known actor set and create new index for them
known_actors.add(actor_id)
actor_id_to_index_map[actor_id] = index_counter
index_to_actor_ids.append(actor_id)
index_counter += 1
# Finished with this film
movie_actor_list.append({
"movie": this_movie["title_name"],
"actors": set(this_movie['actor_ids']),
"genres": this_movie["title_genre"]
})_____no_output_____print("Known Actors:", len(known_actors))Known Actors: 161996
</code>
## Generate Same DataFrame using Sparse Matrices
The above will break if you have too much data. We can get around that partially with sparse matrices, where we only store the non-zero elements of the feature matrix and their indices._____no_output_____
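As a quick illustration of what storing only the non-zero elements and their indices means, the small sketch below builds a tiny boolean sparse matrix and prints its internal CSR representation. The names `demo` and `csr` are made up for this example and are not used elsewhere in the notebook._____no_output_____
<code>
# Illustration: sparse storage keeps only non-zero values plus their coordinates
from scipy.sparse import lil_matrix

demo = lil_matrix((3, 5), dtype=bool)   # 3 "movies" x 5 "actors", all zeros
demo[0, 1] = 1
demo[2, 4] = 1
csr = demo.tocsr()
print(csr.data)     # stored non-zero values
print(csr.indices)  # column index of each stored value
print(csr.indptr)   # row pointers into data/indices_____no_output_____
</code>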
<code>
# With sparse matrix, initialize to size of Movies x Actors of 0s
matrix_sparse = lil_matrix((len(movie_actor_list), len(known_actors)), dtype=bool)
# Update the matrix, movie by movie, setting non-zero values for the appropriate actors
for row,movie in enumerate(movie_actor_list):
for actor_id in movie["actors"]:
this_index = actor_id_to_index_map[actor_id]
matrix_sparse[row,this_index] = 1_____no_output_____df = pd.DataFrame.sparse.from_spmatrix(
matrix_sparse,
index=[m["movie"] for m in movie_actor_list],
columns=[index_to_actor_ids[i] for i in range(len(known_actors))]
)
df_____no_output_____top_k_actors = 1000_____no_output_____# Extract the most frequent actors, so we can deal with a reasonable dataset size
actor_df = df.sum(axis=0)
top_actors = set(actor_df.sort_values().tail(top_k_actors).index)_____no_output_____# Restrict the data frame to just the movies containing
#. the top k actors
reduced_df = df[top_actors] # restrict to just these top actors
# throw away movies that don't have any of these actors
reduced_df = reduced_df.loc[reduced_df.sum(axis=1) > 0]
reduced_df_____no_output_____
</code>
## Apply SVD to Feature Matrix_____no_output_____
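As a reminder of what the cells below compute (standard notation, added here for reference): truncated SVD approximates the movie-by-actor matrix $X$ by keeping only the top $k$ singular values and vectors,

$$X \approx U_k \Sigma_k V_k^{\top},$$

and scikit-learn's `TruncatedSVD.transform` returns the reduced coordinates $X V_k \approx U_k \Sigma_k$ (here, a 2-dimensional embedding of each movie). Unlike PCA, the data is not centered first._____no_output_____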
<code>
# https://scikit-learn.org/stable/modules/generated/sklearn.decomposition.TruncatedSVD.html
from sklearn.decomposition import TruncatedSVD _____no_output_____matrix_dense = reduced_df.to_numpy()
reduced_df_____no_output_____svd = TruncatedSVD(n_components=2)_____no_output_____svd.fit(matrix_dense)_____no_output_____matrix_reduced = svd.transform(matrix_dense)_____no_output_____np.mean(matrix_reduced, axis=0)_____no_output_____plt.scatter(matrix_reduced[:,0], matrix_reduced[:,1])_____no_output_____counter = 0
for index in np.argwhere((matrix_reduced[:,0] > 1.0) & (matrix_reduced[:,1] > 0.8)):
movie_title = reduced_df.iloc[index[0]].name
for this_movie in [m for m in movie_actor_list if m['movie'] == movie_title]:
print(this_movie["movie"])
print("\tGenres:", ", ".join(this_movie["genres"]))
print("\tActors:", ", ".join([actor_id_to_name_map[actor] for actor in this_movie["actors"]]))
counter += 1
if counter > 10:
print("...")
break The Alternate
Genres: Action, Drama
Actors: Ice-T, Eric Roberts, Bryan Genesse, Michael Madsen
Luck of the Draw
Genres: Crime, Drama, Thriller
Actors: Michael Madsen, Dennis Hopper, Eric Roberts, James Marshall
Skin Traffik
Genres: Action, Crime
Actors: Michael Madsen, Eric Roberts, Mickey Rourke
Mission: The Prophet
Genres: Action, Thriller
Actors: Aleksey Chadov, Oded Fehr, Alexander Nevsky, Eric Roberts, Stephen Baldwin, Michael Madsen, Casper Van Dien
Beyond the Game
Genres: Action, Drama
Actors: Michael Madsen, Eric Roberts, Mark Dacascos, Danny Trejo
counter = 0
for index in np.argwhere((matrix_reduced[:,0] < 0.1) & (matrix_reduced[:,1] < 0.1)):
movie_title = reduced_df.iloc[index[0]].name
for this_movie in [m for m in movie_actor_list if m['movie'] == movie_title]:
print(this_movie["movie"])
print("\tGenres:", ", ".join(this_movie["genres"]))
print("\tActors:", ", ".join([actor_id_to_name_map[actor] for actor in this_movie["actors"]]))
counter += 1
if counter > 10:
print("...")
break Grizzly II: Revenge
Genres: Horror, Music, Thriller
Actors: George Clooney, Charlie Sheen
Crime and Punishment
Genres: Drama
Actors: John Hurt, Crispin Glover
For the Cause
Genres: Action, Adventure, Drama
Actors: Dean Cain, Justin Whalin, Thomas Ian Griffith
For the Cause
Genres: Drama
Actors: Eugene Parker, Jerod Haynes, Anthony Lemay
For the Cause
Genres: Comedy
Actors: Abdelghani Sannak, Ramzi Maqdisi, Jeremy Banster
Gang
Genres: Action, Crime, Drama
Actors: Jackie Shroff, Kumar Gaurav, Nana Patekar, Jaaved Jaaferi
Gang
Genres: Action
Actors: Ji-Hyuk Cha
In the Mood for Love
Genres: Drama, Romance
Actors: Tony Chiu-Wai Leung, Siu Ping-Lam, Tung Cho 'Joe' Cheung
Chinese Coffee
Genres: Drama
Actors: Jerry Orbach, Al Pacino
Fandango
Genres: \N
Actors: Moritz Bleibtreu, Lars Rudolph, Richy Müller
Fandango
Genres: Drama
Actors: Arturo Meseguer, Martín Zapata
The Dancer Upstairs
Genres: Crime, Drama, Thriller
Actors: Juan Diego Botto, Javier Bardem
Don's Plum
Genres: Comedy, Drama
Actors: Kevin Connolly, Tobey Maguire, Scott Bloom, Leonardo DiCaprio
Heavy Metal 2000
Genres: Action, Adventure, Animation
Actors: Pier Paquette, Michael Ironside, Billy Idol
The Sorcerer's Apprentice
Genres: Adventure, Family, Fantasy
Actors: Robert Davi, Byron Taylor
The Sorcerer's Apprentice
Genres: Action, Adventure, Family
Actors: Alfred Molina, Nicolas Cage, Jay Baruchel
...
comp1_genre_map = {}
comp1_actor_map = {}
comp1_counter = 0
for index in np.argwhere((matrix_reduced[:,0] > 1.0) & (matrix_reduced[:,1] < 0.2)):
movie_title = reduced_df.iloc[index[0]].name
for this_movie in [m for m in movie_actor_list if m['movie'] == movie_title]:
for g in this_movie["genres"]:
comp1_genre_map[g] = comp1_genre_map.get(g, 0) + 1
for a in [actor_id_to_name_map[actor] for actor in this_movie["actors"]]:
comp1_actor_map[a] = comp1_actor_map.get(a, 0) + 1
comp1_counter += 1
print("Movies in Component 1:", comp1_counter)
print("Genres:")
for g in sorted(comp1_genre_map, key=comp1_genre_map.get, reverse=True)[:10]:
print("\t", g, comp1_genre_map[g])
print("Actors:")
for a in sorted(comp1_actor_map, key=comp1_actor_map.get, reverse=True)[:10]:
print("\t", a, comp1_actor_map[a])Movies in Component 1: 47
Genres:
Drama 29
Action 23
Thriller 22
Horror 17
Crime 10
Mystery 9
Fantasy 5
Adventure 4
Comedy 4
Sci-Fi 3
Actors:
Eric Roberts 47
Vernon Wells 4
Armand Assante 4
Martin Kove 4
Gary Daniels 3
Dean Cain 2
David A.R. White 2
John Savage 2
Aaron Groben 2
Noel Gugliemi 2
comp2_genre_map = {}
comp2_actor_map = {}
comp2_counter = 0
for index in np.argwhere((matrix_reduced[:,0] < 0.1) & (matrix_reduced[:,1] < 0.1)):
movie_title = reduced_df.iloc[index[0]].name
for this_movie in [m for m in movie_actor_list if m['movie'] == movie_title]:
for g in this_movie["genres"]:
comp2_genre_map[g] = comp2_genre_map.get(g, 0) + 1
for a in [actor_id_to_name_map[actor] for actor in this_movie["actors"]]:
comp2_actor_map[a] = comp2_actor_map.get(a, 0) + 1
comp2_counter += 1
print("Movies in Component 2:", comp2_counter)
print("Genres:")
for g in sorted(comp2_genre_map, key=comp2_genre_map.get, reverse=True)[:10]:
print("\t", g, comp2_genre_map[g])
print("Actors:")
for a in sorted(comp2_actor_map, key=comp2_actor_map.get, reverse=True)[:10]:
print("\t", a, comp2_actor_map[a])Movies in Component 2: 18069
Genres:
Drama 17274
Comedy 7327
Thriller 5586
Action 5152
Crime 3959
Romance 3727
Horror 3165
Mystery 2093
Adventure 2032
Sci-Fi 1190
Actors:
Joe Hammerstone 91
Tony Devon 85
Louis Koo 85
Brahmanandam 79
Nicolas Cage 69
James Franco 68
Prakash Raj 65
Samuel L. Jackson 63
Willem Dafoe 63
Simon Yam 62
</code>
## Find Similar Movies in Reduced Dimensional Space_____no_output_____
<code>
query_idx = [idx for idx,m in enumerate(reduced_df.index) if m == "The Lord of the Rings: The Fellowship of the Ring"][0]
# query_idx = [idx for idx,m in enumerate(reduced_df.index) if m == "Heavy Metal 2000"][0]
# query_idx = [idx for idx,m in enumerate(reduced_df.index) if m == "Casino Royale"][0]
# query_idx = [idx for idx,m in enumerate(reduced_df.index) if m == "Star Wars: Episode II - Attack of the Clones"][0]
query_idx_____no_output_____query_v = matrix_reduced[query_idx,:]_____no_output_____query_v_____no_output_____# get distances between all films and query film
distances = pairwise_distances(matrix_reduced, [query_v], metric='euclidean')
_____no_output_____distances_df = pd.DataFrame(distances, columns=["distance"])
for idx,row in distances_df.sort_values(by="distance", ascending=True).head(20).iterrows():
print(idx, reduced_df.iloc[idx].name, row["distance"])18 The Lord of the Rings: The Fellowship of the Ring 0.0
11487 Days and Nights 1.2209541406754873e-05
18457 The Tomorrow War 1.2776674116332596e-05
6383 My Own Love Song 1.3171993015743094e-05
10102 Best Man Down 1.639756956937894e-05
15541 Kingsglaive: Final Fantasy XV 1.8152531475848637e-05
11126 Mystery Road 2.189655804804624e-05
16715 The Professor 2.5540080923330028e-05
1529 Daddy Day Care 2.589325207709699e-05
8976 Jupiter Ascending 3.269693367234955e-05
5179 Gulabo Sitabo 3.298438040957619e-05
245 Chain of Fools 3.3382949677633855e-05
11309 Guys and a Cop 4.018040821326311e-05
13328 Raped by an Angel 5 4.018040821326311e-05
17381 Film Fanatic 4.018040821326311e-05
5881 Sheep Without a Shepherd 4.018040821326311e-05
7919 Shadows 4.018040821326311e-05
8868 Fox Ghost 4.018040821326311e-05
10860 Tortured Sex Goddess of Ming Dynasty 4.018040821326311e-05
18485 Girl Dorm 4.018040821326311e-05
</code>
## SVD and Column Feature Space
Above, we focused on the *movies* in the reduced feature/"concept" space. Here, we will use SVD to map the *actors* into the reduced "concept" space._____no_output_____
<code>
# See that the shape of this matrix is *reduced space* X original features
svd.components_.shape_____no_output_____
</code>
We will use this reduced space to inspect the associations between a given actor and the set of concepts (i.e., the reduced space)._____no_output_____
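For reference, `TruncatedSVD` factorizes the movies × actors matrix roughly as $X \approx U \Sigma V^T$, and `svd.components_` holds $V^T$: while `svd.transform(X)` gives the *movie* coordinates in the concept space, the rows of `svd.components_.T` give the *actor* coordinates in that same space. A minimal sketch of this correspondence (reusing `svd` and `matrix_dense` from the cells above):

<code>
# movie coordinates in the 2-D concept space (this is the same as matrix_reduced above)
movie_coords = svd.transform(matrix_dense)   # shape: (n_movies, 2)
# actor coordinates in the 2-D concept space
actor_coords = svd.components_.T             # shape: (n_actors, 2)
print(movie_coords.shape, actor_coords.shape)
</code>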
<code>
# query_actor = [idx for idx,name in actor_id_to_name_map.items() if name == "Ewan McGregor"][0]
# query_actor = [idx for idx,name in actor_id_to_name_map.items() if name == "Eric Roberts"][0]
# query_actor = [idx for idx,name in actor_id_to_name_map.items() if name == "Jason Statham"][0]
# query_actor = [idx for idx,name in actor_id_to_name_map.items() if name == "Leonardo DiCaprio"][0]
query_actor = [idx for idx,name in actor_id_to_name_map.items() if name == "George Clooney"][0]
query_actor_____no_output_____query_actor_index = np.argwhere(reduced_df.columns == query_actor)[0,0]
query_actor_index_____no_output_____# Show the actor strengths across these concepts
svd.components_.T[query_actor_index,:] _____no_output_____# And you can use this method to evaluate distances between actors in the concept space
distances = pairwise_distances(svd.components_.T, [svd.components_.T[query_actor_index,:]], metric='euclidean')
distances_df = pd.DataFrame(distances, columns=["distance"])
for idx,row in distances_df.sort_values(by="distance", ascending=True).head(20).iterrows():
print(idx, actor_id_to_name_map[reduced_df.columns[idx]], row["distance"])397 George Clooney 0.0
854 Lalu Alex 2.1684959239110375e-05
552 Steve Carell 5.765535961270728e-05
950 Liam Neeson 8.58474608863118e-05
494 Tanikella Bharani 0.00011361512397967896
522 Gérard Depardieu 0.00014417276880813264
926 Siddique 0.0001534176616126892
888 Shawn Yue 0.00015625326486224262
584 Kunchacko Boban 0.00022063353354574957
22 Guy Pearce 0.00024232499554002016
794 Robert Downey Jr. 0.0002447877571351057
12 Paul Giamatti 0.00028857145979086976
0 Tony Ka Fai Leung 0.00029564349774853754
900 Mark Wahlberg 0.00029966525624988997
521 Nassar 0.0003054147725516829
9 Christian Bale 0.00030644785930321463
234 Danny Glover 0.000533473514063691
737 Morgan Freeman 0.0005355495134715015
209 Luke Wilson 0.0005536288382481004
236 Jagapathi Babu 0.000554439235632182
</code>
## SVD is more scalable than PCA_____no_output_____
<code>
from sklearn.decomposition import PCA_____no_output_____matrix_sparse.shape_____no_output_____# This will fail: with its default solver, scikit-learn's PCA centers the data and requires a dense array, so it cannot be fit on a scipy sparse matrix
pca = PCA(n_components=2)
pca.fit(matrix_sparse)_____no_output_____svd = TruncatedSVD(n_components=2)
svd.fit(matrix_sparse)_____no_output_____matrix_reduced = svd.transform(matrix_sparse)_____no_output_____print(np.mean(matrix_reduced, axis=0))
plt.scatter(matrix_reduced[:,0], matrix_reduced[:,1])[0.00230072 0.00068161]
comp1_genre_map = {}
comp1_actor_map = {}
comp1_counter = 0
for index in np.argwhere((matrix_reduced[:,0] > 1.0) & (matrix_reduced[:,1] < 0.2)):
movie_title = df.iloc[index[0]].name
for this_movie in [m for m in movie_actor_list if m['movie'] == movie_title]:
for g in this_movie["genres"]:
comp1_genre_map[g] = comp1_genre_map.get(g, 0) + 1
for a in [actor_id_to_name_map[actor] for actor in this_movie["actors"]]:
comp1_actor_map[a] = comp1_actor_map.get(a, 0) + 1
comp1_counter += 1
print("Movies in Component 1:", comp1_counter)
print("Genres:")
for g in sorted(comp1_genre_map, key=comp1_genre_map.get, reverse=True)[:10]:
print("\t", g, comp1_genre_map[g])
print("Actors:")
for a in sorted(comp1_actor_map, key=comp1_actor_map.get, reverse=True)[:10]:
print("\t", a, comp1_actor_map[a])Movies in Component 1: 100
Genres:
Drama 72
Thriller 49
Action 39
Horror 31
Crime 27
Mystery 17
Comedy 13
Sci-Fi 8
Romance 8
Adventure 7
Actors:
Eric Roberts 100
Michael Madsen 5
Tom Sizemore 5
Vernon Wells 4
Armand Assante 4
Martin Kove 4
Danny Trejo 3
Gary Daniels 3
Dean Cain 2
Casper Van Dien 2
comp2_genre_map = {}
comp2_actor_map = {}
comp2_counter = 0
for index in np.argwhere((matrix_reduced[:,0] < 0.1) & (matrix_reduced[:,1] < 0.1)):
movie_title = df.iloc[index[0]].name
for this_movie in [m for m in movie_actor_list if m['movie'] == movie_title]:
for g in this_movie["genres"]:
comp2_genre_map[g] = comp2_genre_map.get(g, 0) + 1
for a in [actor_id_to_name_map[actor] for actor in this_movie["actors"]]:
comp2_actor_map[a] = comp2_actor_map.get(a, 0) + 1
comp2_counter += 1
print("Movies in Component 2:", comp2_counter)
print("Genres:")
for g in sorted(comp2_genre_map, key=comp2_genre_map.get, reverse=True)[:10]:
print("\t", g, comp2_genre_map[g])
print("Actors:")
for a in sorted(comp2_actor_map, key=comp2_actor_map.get, reverse=True)[:10]:
print("\t", a, comp2_actor_map[a])_____no_output_____
</code>
| {
"repository": "tejatammali/umd.inst414",
"path": "Module03/04-Dimensionality.SVD.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 2,
"size": 110366,
"hexsha": "d0a76f87edce1da561ad8fd7b8db89c77ff3c24c",
"max_line_length": 7924,
"avg_line_length": 45.6435070306,
"alphanum_fraction": 0.5060254064
} |
# Notebook from ManchesterBioinference/BranchedGP
Path: notebooks/Hematopoiesis.ipynb
Branching GP Regression on hematopoietic data
--
*Alexis Boukouvalas, 2017*
**Note:** this notebook is automatically generated by [Jupytext](https://jupytext.readthedocs.io/en/latest/index.html), see the README for instructions on working with it.
Branching GP regression with Gaussian noise on the hematopoiesis data described in the paper "BGP: Gaussian processes for identifying branching dynamics in single cell data".
This notebook shows how to build a BGP model and plot the posterior model fit and posterior branching times._____no_output_____
<code>
import time
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import BranchedGP
plt.style.use("ggplot")
%matplotlib inline_____no_output_____
</code>
### Read the hematopoiesis data. This has been simplified to a small subset of 23 genes found to be branching.
We have also performed Monocle2 (version 2.1) - DDRTree on this data. The results loaded include the Monocle estimated pseudotime, branching assignment (state) and the DDRTree latent dimensions._____no_output_____
<code>
Y = pd.read_csv("singlecelldata/hematoData.csv", index_col=[0])
monocle = pd.read_csv("singlecelldata/hematoMonocle.csv", index_col=[0])_____no_output_____Y.head()_____no_output_____monocle.head()_____no_output_____# Plot Monocle DDRTree space
genelist = ["FLT3", "KLF1", "MPO"]
f, ax = plt.subplots(1, len(genelist), figsize=(10, 5), sharex=True, sharey=True)
for ig, g in enumerate(genelist):
y = Y[g].values
yt = np.log(1 + y / y.max())
yt = yt / yt.max()
h = ax[ig].scatter(
monocle["DDRTreeDim1"],
monocle["DDRTreeDim2"],
c=yt,
s=50,
alpha=1.0,
vmin=0,
vmax=1,
)
ax[ig].set_title(g)_____no_output_____def PlotGene(label, X, Y, s=3, alpha=1.0, ax=None):
fig = None
if ax is None:
fig, ax = plt.subplots(1, 1, figsize=(5, 5))
for li in np.unique(label):
idxN = (label == li).flatten()
ax.scatter(X[idxN], Y[idxN], s=s, alpha=alpha, label=int(np.round(li)))
return fig, ax_____no_output_____
</code>
### Fit BGP model
Notice the cell assignment uncertainty is higher for cells close to the branching point.
_____no_output_____
<code>
def FitGene(g, ns=20): # for quick results subsample data
t = time.time()
Bsearch = list(np.linspace(0.05, 0.95, 5)) + [
1.1
] # set of candidate branching points
GPy = (Y[g].iloc[::ns].values - Y[g].iloc[::ns].values.mean())[
:, None
] # remove mean from gene expression data
GPt = monocle["StretchedPseudotime"].values[::ns]
globalBranching = monocle["State"].values[::ns].astype(int)
d = BranchedGP.FitBranchingModel.FitModel(Bsearch, GPt, GPy, globalBranching)
print(g, "BGP inference completed in %.1f seconds." % (time.time() - t))
# plot BGP
fig, ax = BranchedGP.VBHelperFunctions.PlotBGPFit(
GPy, GPt, Bsearch, d, figsize=(10, 10)
)
# overplot data
f, a = PlotGene(
monocle["State"].values,
monocle["StretchedPseudotime"].values,
Y[g].values - Y[g].iloc[::ns].values.mean(),
ax=ax[0],
s=10,
alpha=0.5,
)
# Calculate Bayes factor of branching vs non-branching
bf = BranchedGP.VBHelperFunctions.CalculateBranchingEvidence(d)["logBayesFactor"]
fig.suptitle("%s log Bayes factor of branching %.1f" % (g, bf))
return d, fig, ax
d, fig, ax = FitGene("MPO")MPO BGP inference completed in 58.0 seconds.
d_c, fig_c, ax_c = FitGene("CTSG")CTSG BGP inference completed in 64.9 seconds.
</code>
| {
"repository": "ManchesterBioinference/BranchedGP",
"path": "notebooks/Hematopoiesis.ipynb",
"matched_keywords": [
"Monocle"
],
"stars": 23,
"size": 284362,
"hexsha": "d0a9316c1ed49d0dd592661e03b2886d2279fbce",
"max_line_length": 110092,
"avg_line_length": 516.0834845735,
"alphanum_fraction": 0.9333455244
} |
# Notebook from brajard/DAPPER
Path: tutorials/T4 - Dynamical systems, chaos, Lorenz.ipynb
<code>
from resources.workspace import *
%matplotlib inline_____no_output_____
</code>
## Dynamical systems
are systems (sets of equations) whose variables evolve in time (the equations contain time derivatives). As a branch of mathematics, its theory is mainly concerned with understanding the behaviour of solutions (trajectories) of the systems.
## Chaos
is also known as the butterfly effect: "a butterfly that flaps its wings in Brazil can 'cause' a hurricane in Texas".
As opposed to the opinions of Descartes/Newton/Laplace, chaos effectively means that even in a deterministic (non-stochastic) universe, we can only predict "so far" into the future. This will be illustrated below using two toy-model dynamical systems made by Edward Lorenz._____no_output_____---
## The Lorenz (1963) attractor_____no_output_____The [Lorenz-63 dynamical system](resources/DA_intro.pdf#page=22) can be derived as an extreme simplification of *Rayleigh-Bénard convection*: fluid circulation in a shallow layer of fluid uniformly heated (cooled) from below (above).
This produces the following 3 *coupled* ordinary differential equations (ODE):
$$
\begin{aligned}
\dot{x} & = \sigma(y-x) \\
\dot{y} & = \rho x - y - xz \\
\dot{z} & = -\beta z + xy
\end{aligned}
$$
where the "dot" represents the time derivative, $\frac{d}{dt}$. The state vector is $\mathbf{x} = (x,y,z)$, and the parameters are typically set to_____no_output_____
<code>
SIGMA = 10.0
BETA = 8/3
RHO = 28.0_____no_output_____
</code>
The ODEs can be coded as follows_____no_output_____
<code>
def dxdt(xyz, t0, sigma, beta, rho):
"""Compute the time-derivative of the Lorenz-63 system."""
x, y, z = xyz
return [
sigma * (y - x),
x * (rho - z) - y,
x * y - beta * z
]_____no_output_____
</code>
#### Numerical integration to compute the trajectories_____no_output_____Below is a function to numerically **integrate** the ODEs and **plot** the solutions.
<!--
This function also takes arguments to control ($\sigma$, $\beta$, $\rho$) and of the numerical integration (`N`, `T`).
-->_____no_output_____
<code>
from scipy.integrate import odeint # integrator
output_63 = [None]
@interact( sigma=(0.,50), beta=(0.,5), rho=(0.,50), N=(0,50), eps=(0.01,1), T=(0.,30))
def animate_lorenz(sigma=SIGMA, beta=BETA, rho=RHO , N=2, eps=0.01, T=1.0):
# Initial conditions: perturbations around some "proto" state
seed(1)
x0_proto = array([-6.1, 1.2, 32.5])
x0 = x0_proto + eps*randn((N, 3))
# Compute trajectories
tt = linspace(0, T, int(100*T)+1) # Time sequence for trajectory
dd = lambda x,t: dxdt(x,t, sigma,beta,rho) # Define dxdt(x,t) with fixed params.
xx = array([odeint(dd, xn, tt) for xn in x0]) # Integrate
# PLOTTING
ax = plt.figure(figsize=(10,5)).add_subplot(111, projection='3d')
ax.axis('off')
colors = plt.cm.jet(linspace(0,1,N))
for i in range(N):
ax.plot(*(xx[i,:,:].T),'-' ,c=colors[i])
#ax.scatter3D(*xx[i,0 ,:],s=20,c=colors[i],marker='<')
ax.scatter3D(*xx[i,-1,:],s=40,c=colors[i])
output_63[0] = xxWidget Javascript not detected. It may not be installed or enabled properly.
</code>
**Exc 4.2**:
* Move `T` (use your arrow keys). What does it control?
* Set `T` to something small; move the sliders for `N` and `eps`. What do they control?
* Visually investigate the system's (i.e. the trajectories') sensitivity to initial conditions by moving `T`, `N` and `eps`. Very roughly, estimate its predictability (i.e. how far into the future one can forecast for a fixed `eps` and a fixed skill level)?_____no_output_____### Averages_____no_output_____**Exc 4.8*:** Slide `N` and `T` to their upper bounds. Execute the code cell below. It computes the average location of the $i$-th component of the state in two ways. Do you think the histograms actually approximate the same distribution? If so, then the system is called [ergodic](https://en.wikipedia.org/wiki/Ergodic_theory#Ergodic_theorems). In that case, does it matter if one computes statistics (over the system dynamics) by using several short experiment runs or one long run?_____no_output_____
<code>
xx = output_63[0][:,:,0] # state component index 0 (must be 0,1,2)
plt.hist(xx[:,-1] ,normed=1,label="ensemble dist.",alpha=1.0) # -1: last time
plt.hist(xx[-1,:] ,normed=1,label="temporal dist.",alpha=0.5) # -1: last ensemble member
#plt.hist(xx.ravel(),normed=1,label="total distribution",alpha=0.5)
plt.legend();_____no_output_____
</code>
---
## The "Lorenz-95" model
The Lorenz-96 system
is a "1D" model, designed to simulate atmospheric convection. Each state variable $\mathbf{x}_i$ can be considered some atmospheric quantity at grid point at a fixed lattitude of the earth. The system
is given by the coupled set of ODEs,
$$
\frac{d \mathbf{x}_i}{dt} = (\mathbf{x}_{i+1} − \mathbf{x}_{i-2}) \mathbf{x}_{i-1} − \mathbf{x}_i + F
\, ,
\quad \quad i \in \{1,\ldots,m\}
\, ,
$$
where the subscript indices apply periodically.
This model is not derived from physics but has similar characteristics, such as
<ul>
<li> there is external forcing, determined by a parameter $F$;</li>
<li> there is internal dissipation, emulated by the linear term;</li>
<li> there is energy-conserving advection, emulated by quadratic terms.</li>
</ul>
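A direct transcription of this right-hand side (a small sketch; the interactive cell further below wraps essentially the same expression inside its animation function):

<code>
import numpy as np

def dxdt_L96(x, Force=8.0):
    # (x_{i+1} - x_{i-2}) * x_{i-1} - x_i + F, with periodic indices handled by np.roll
    return (np.roll(x, -1) - np.roll(x, 2)) * np.roll(x, 1) - x + Force
</code>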
[Further description](resources/DA_intro.pdf#page=23)._____no_output_____**Exc 4.10:** Show that the "total energy" $\sum_{i=1}^{m} \mathbf{x}_i^2$ is preserved by the quadratic terms in the ODE.
_____no_output_____
<code>
show_answer("Hint: Lorenz energy")_____no_output_____show_answer("Lorenz energy")_____no_output_____
</code>
The model is animated below._____no_output_____
<code>
# For all i, any n: s(x,n) := x[i+n], circularly.
def s(x,n):
return np.roll(x,-n)
output_95 = [None]
def animate_lorenz_95(m=40,Force=8.0,eps=0.01,T=0):
# Initial conditions: perturbations
x0 = zeros(m)
x0[0] = eps
def dxdt(x,t):
return (s(x,1)-s(x,-2))*s(x,-1) - x + Force
tt = linspace(0, T, int(40*T)+1)
xx = odeint(lambda x,t: dxdt(x,t), x0, tt)
output_95[0] = xx
plt.figure(figsize=(7,4))
# Plot last only
#plt.plot(xx[-1],'b')
# Plot multiple
Lag = 8
colors = plt.cm.cubehelix(0.1+0.6*linspace(0,1,Lag))
for k in range(Lag,0,-1):
plt.plot(xx[max(0,len(xx)-k)],c=colors[Lag-k])
plt.ylim(-10,20)
plt.show()
interact(animate_lorenz_95,eps=(0.01,3,0.1),T=(0.05,40,0.05),Force=(0,40,1),m=(5,60,1));Widget Javascript not detected. It may not be installed or enabled properly.
</code>
**Exc 4.12:** Under which settings of the force `F` is the system chaotic?_____no_output_____---
## Error/perturbation dynamics_____no_output_____**Exc 4.14*:** Suppose $x(t)$ and $z(t)$ are "twins": they evolve according to the same law $f$:
$$\frac{dx}{dt} = f(x) \\ \frac{dz}{dt} = f(z) \, .$$
* a) Define the "error": $\varepsilon(t) = x(t) - z(t)$.
Suppose $z(0)$ is close to $x(0)$.
Let $F = \frac{df}{dx}(x(t))$.
Show that the error evolves according to the ordinary differential equation (ODE)
$$\frac{d \varepsilon}{dt} \approx F \varepsilon \, .$$
* b) Show that the error grows exponentially: $\varepsilon(t) = \varepsilon(0) e^{F t} $.
* c)
* 1) Suppose $F<1$.
What happens to the error?
What does this mean for predictability?
* 2) Now suppose $F>1$.
Given that all observations are uncertain (i.e. $R_t>0$, if only ever so slightly),
can we ever hope to estimate $x(t)$ with 0 uncertainty?
* d) Consider the ODE derived above.
How might we change it in order to model (i.e. emulate) a saturation of the error at some level?
Can you solve this equation?
* e) Now suppose $z(t)$ evolves according to $\frac{dz}{dt} = g(z)$, with $g \neq f$.
What is now the differential equation governing the evolution of the error, $\varepsilon$?_____no_output_____
<code>
show_answer("error evolution")_____no_output_____
</code>
**Exc 4.16*:** Recall the Lorenz-63 system. What is its doubling time (i.e. estimate how long it takes for two trajectories to grow twice as far apart as they were to start with)?
*Hint: Set `N=50, eps=0.01, T=1,` and compute the spread of the particles now as compared to how they started*_____no_output_____
<code>
xx = output_63[0][:,-1] # Ensemble of particles at the end of integration
### compute your answer here ###_____no_output_____show_answer("doubling time")_____no_output_____
</code>
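One rough way to estimate it from the ensemble generated above (a sketch only, assuming the sliders were left at the hint's settings `N=50, eps=0.01, T=1`; the `show_answer` call gives the intended solution):

<code>
import numpy as np
xx_all = output_63[0]                             # ensemble trajectories, shape (N, time, 3)
d0 = np.std(xx_all[:, 0, :], axis=0).mean()       # initial ensemble spread
dT = np.std(xx_all[:, -1, :], axis=0).mean()      # final ensemble spread
T = 1.0                                           # integration length used above
print("doubling time ~", T*np.log(2)/np.log(dT/d0))  # assumes exponential growth of the spread
</code>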
The answer actually depends on where in "phase space" the particles started.
To get a universal answer one must average these experiments for many different initial conditions._____no_output_____---
## In summary:
Prediction (forecasting) with these systems is challenging because they are chaotic: small errors grow exponentially.
Conversely: chaos means that there is a limit to how far into the future we can make predictions (skillfully).
It is therefore crucial to minimize the initial error as much as possible. This is a task for DA._____no_output_____### Next: [Ensemble [Monte-Carlo] approach](T5 - Ensemble [Monte-Carlo] approach.ipynb)_____no_output_____
| {
"repository": "brajard/DAPPER",
"path": "tutorials/T4 - Dynamical systems, chaos, Lorenz.ipynb",
"matched_keywords": [
"evolution"
],
"stars": null,
"size": 560874,
"hexsha": "d0a9718631c4df6ac673214ea7d25145418710e2",
"max_line_length": 93914,
"avg_line_length": 904.635483871,
"alphanum_fraction": 0.9410331019
} |
# Notebook from JohannesBuchner/gammapy
Path: docs/tutorials/mcmc_sampling.ipynb
# MCMC sampling using the emcee package
## Introduction
The goal of Markov Chain Monte Carlo (MCMC) algorithms is to approximate the posterior distribution of your model parameters by random sampling in a probabilistic space. For most readers this sentence was probably not very helpful, so we'll start straight with an example; you should also read the more detailed mathematical descriptions of the method [here](https://www.pas.rochester.edu/~sybenzvi/courses/phy403/2015s/p403_17_mcmc.pdf) and [here](https://github.com/jakevdp/BayesianAstronomy/blob/master/03-Bayesian-Modeling-With-MCMC.ipynb).
### How does it work ?
The idea is that we use a number of walkers that will sample the posterior distribution (i.e. sample the Likelihood profile).
The goal is to produce a "chain", i.e. a list of $\theta$ values, where each $\theta$ is a vector of parameters for your model.<br>
If you start far away from the truth value, the chain will take some time to converge until it reaches a stationary state. Once it has reached this stage, each successive element of the chain is a sample of the target posterior distribution.<br>
This means that, once we have obtained the chain of samples, we have everything we need. We can compute the distribution of each parameter by simply approximating it with the histogram of the samples projected into the parameter space. This will provide the errors and correlations between parameters.
Now let's try to put a picture on the ideas described above. With this notebook, we have simulated and carried out an MCMC analysis for a source with the following parameters:<br>
$Index=2.0$, $Norm=5\times10^{-12}$ cm$^{-2}$ s$^{-1}$ TeV$^{-1}$, $Lambda =(1/Ecut) = 0.02$ TeV$^{-1}$ (50 TeV) for 20 hours.
The results that you can get from a MCMC analysis will look like this :
<img src="images/gammapy_mcmc.png" width="800">
On the top two panels, we show the pseudo-random walk of one walker from an offset starting value as it evolves towards a better solution.
In the bottom right panel, we show the trace of each of the 16 walkers for 500 runs (the chain described previously). For the first 100 runs, the parameters evolve towards a solution (this can be viewed as a fitting step). They then explore the local minimum for 400 runs, which are used to estimate the parameter correlations and errors.
The choice of the Nburn value (when walkers have reached a stationary stage) can be done by eye but you can also look at the autocorrelation time.
### Why should I use it ?
When it comes to evaluating errors and investigating parameter correlations, one typically estimates the likelihood in a gridded search (2D likelihood profiles). Each point of the grid implies a new model fit. If we use 10 steps for each parameter, we need to carry out 100 fitting procedures.
Now let's say the model has $N$ parameters: we need to carry out that gridded analysis $N*(N-1)$ times.
So for 5 free parameters you need 20 gridded searches, resulting in 2000 individual fits.
Clearly this strategy doesn't scale well to high-dimensional models.
Just for fun: if each fit procedure takes 10s, we're talking about 5h of computing time to estimate the correlation plots.
There are many MCMC packages in the python ecosystem but here we will focus on [emcee](https://emcee.readthedocs.io), a lightweight Python package. A description is provided here : [Foreman-Mackey, Hogg, Lang & Goodman (2012)](https://arxiv.org/abs/1202.3665)._____no_output_____
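Before turning to the gammapy-specific machinery below, here is a minimal, self-contained emcee illustration (a sketch only, assuming emcee >= 3; it is not part of the gammapy analysis): sampling the posterior of the mean of a Gaussian with known width.

<code>
import numpy as np
import emcee

data = np.random.normal(2.0, 1.0, size=100)            # fake observations

def log_prob(theta):
    mu = theta[0]
    if not -10.0 < mu < 10.0:                           # flat prior on mu
        return -np.inf
    return -0.5 * np.sum((data - mu) ** 2)              # Gaussian log-likelihood (sigma = 1)

nwalkers, ndim = 8, 1
p0 = 2.0 + 0.1 * np.random.randn(nwalkers, ndim)        # start the walkers near a guess
sampler = emcee.EnsembleSampler(nwalkers, ndim, log_prob)
sampler.run_mcmc(p0, 1000)
samples = sampler.get_chain(discard=200, flat=True)     # drop burn-in and flatten the walkers
print(samples.mean(), samples.std())                    # posterior mean and width of mu
</code>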
<code>
%matplotlib inline
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")_____no_output_____import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord
from gammapy.irf import load_cta_irfs
from gammapy.maps import WcsGeom, MapAxis
from gammapy.modeling.models import (
ExpCutoffPowerLawSpectralModel,
GaussianSpatialModel,
SkyModel,
Models,
FoVBackgroundModel,
)
from gammapy.datasets import MapDataset
from gammapy.makers import MapDatasetMaker
from gammapy.data import Observation
from gammapy.modeling.sampling import (
run_mcmc,
par_to_model,
plot_corner,
plot_trace,
)
from gammapy.modeling import Fit_____no_output_____import logging
logging.basicConfig(level=logging.INFO)_____no_output_____
</code>
## Simulate an observation
Here we will start by simulating an observation, building a `MapDataset` from it, and filling it with faked counts._____no_output_____
<code>
irfs = load_cta_irfs(
"$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits"
)
observation = Observation.create(
pointing=SkyCoord(0 * u.deg, 0 * u.deg, frame="galactic"),
livetime=20 * u.h,
irfs=irfs,
)_____no_output_____# Define map geometry
axis = MapAxis.from_edges(
np.logspace(-1, 2, 15), unit="TeV", name="energy", interp="log"
)
geom = WcsGeom.create(
skydir=(0, 0), binsz=0.05, width=(2, 2), frame="galactic", axes=[axis]
)
empty_dataset = MapDataset.create(geom=geom, name="dataset-mcmc")
maker = MapDatasetMaker(selection=["background", "edisp", "psf", "exposure"])
dataset = maker.run(empty_dataset, observation)_____no_output_____# Define sky model to simulate the data
spatial_model = GaussianSpatialModel(
lon_0="0 deg", lat_0="0 deg", sigma="0.2 deg", frame="galactic"
)
spectral_model = ExpCutoffPowerLawSpectralModel(
index=2,
amplitude="3e-12 cm-2 s-1 TeV-1",
reference="1 TeV",
lambda_="0.05 TeV-1",
)
sky_model_simu = SkyModel(
spatial_model=spatial_model, spectral_model=spectral_model, name="source"
)
bkg_model = FoVBackgroundModel(dataset_name="dataset-mcmc")
models = Models([sky_model_simu, bkg_model])
print(models)_____no_output_____dataset.models = models
dataset.fake()_____no_output_____dataset.counts.sum_over_axes().plot(add_cbar=True);_____no_output_____# If you want to fit the data for comparison with MCMC later
# fit = Fit(dataset)
# result = fit.run(optimize_opts={"print_level": 1})_____no_output_____
</code>
## Estimate parameter correlations with MCMC
Now let's analyse the simulated data.
Here we just fit it again with the same model we had before as a starting point.
The data that would be needed are the following:
- counts cube, psf cube, exposure cube and background model
Luckily all those maps are already in the Dataset object.
We will need to define a Likelihood function and define priors on parameters.<br>
Here we will assume a uniform prior reading the min, max parameters from the sky model._____no_output_____### Define priors
This step is a bit manual for the moment until we find a better API to define priors.<br>
Note that you **need** to define priors for each parameter; otherwise your walkers can explore uncharted territories (e.g. negative norms)._____no_output_____
<code>
print(dataset)_____no_output_____# Define the free parameters and min, max values
parameters = dataset.models.parameters
parameters["sigma"].frozen = True
parameters["lon_0"].frozen = True
parameters["lat_0"].frozen = True
parameters["amplitude"].frozen = False
parameters["index"].frozen = False
parameters["lambda_"].frozen = False
parameters["norm"].frozen = True
parameters["tilt"].frozen = True
parameters["norm"].min = 0.5
parameters["norm"].max = 2
parameters["index"].min = 1
parameters["index"].max = 5
parameters["lambda_"].min = 1e-3
parameters["lambda_"].max = 1
parameters["amplitude"].min = 0.01 * parameters["amplitude"].value
parameters["amplitude"].max = 100 * parameters["amplitude"].value
parameters["sigma"].min = 0.05
parameters["sigma"].max = 1
# Setting amplitude init values a bit offset to see evolution
# Here starting close to the real value
parameters["index"].value = 2.0
parameters["amplitude"].value = 3.2e-12
parameters["lambda_"].value = 0.05
print(dataset.models)
print("stat =", dataset.stat_sum())_____no_output_____%%time
# Now let's define a function to init parameters and run the MCMC with emcee
# Depending on your number of walkers, Nrun and dimensionality, this can take a while (> minutes)
sampler = run_mcmc(dataset, nwalkers=6, nrun=150) # to speedup the notebook
# sampler=run_mcmc(dataset,nwalkers=12,nrun=1000) # more accurate contours_____no_output_____
</code>
## Plot the results
The MCMC will return a sampler object containing the trace of all walkers.<br>
The most important part is the chain attribute which is an array of shape:<br>
_(nwalkers, nrun, nfreeparam)_
The chain is then used to plot the trace of the walkers and estimate the burnin period (the time for the walkers to reach a stationary stage)._____no_output_____
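Beyond the plots, the chain can also be turned into numerical estimates. A small sketch (assuming an emcee-style `sampler.chain` of shape `(nwalkers, nrun, nfreeparam)` as described above):

<code>
nburn = 50
chain = sampler.chain[:, nburn:, :]                 # drop the burn-in steps
flat = chain.reshape(-1, chain.shape[-1])           # flatten walkers and steps
medians = np.percentile(flat, 50, axis=0)           # parameter medians
lo, hi = np.percentile(flat, [16, 84], axis=0)      # ~68% credible intervals
print("medians:", medians)
print("68% intervals:", list(zip(lo, hi)))
</code>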
<code>
plot_trace(sampler, dataset)_____no_output_____plot_corner(sampler, dataset, nburn=50)_____no_output_____
</code>
## Plot the model dispersion
Using the samples from the chain after the burn-in period, we can plot the different models compared to the truth model. To do this we need the spectral model for each parameter state in the sample._____no_output_____
<code>
emin, emax = [0.1, 100] * u.TeV
nburn = 50
fig, ax = plt.subplots(1, 1, figsize=(12, 6))
for nwalk in range(0, 6):
for n in range(nburn, nburn + 100):
pars = sampler.chain[nwalk, n, :]
# set model parameters
par_to_model(dataset, pars)
spectral_model = dataset.models["source"].spectral_model
spectral_model.plot(
energy_range=(emin, emax),
ax=ax,
energy_power=2,
alpha=0.02,
color="grey",
)
sky_model_simu.spectral_model.plot(
energy_range=(emin, emax), energy_power=2, ax=ax, color="red"
);_____no_output_____
</code>
## Fun Zone
Now that you have the sampler chain, you have in your hands the entire history of each walker in the N-dimensional parameter space. <br>
You can for example trace the steps of each walker in any parameter space._____no_output_____
<code>
# Here we plot the trace of one walker in a given parameter space
parx, pary = 0, 1
plt.plot(sampler.chain[0, :, parx], sampler.chain[0, :, pary], "ko", ms=1)
plt.plot(
sampler.chain[0, :, parx],
sampler.chain[0, :, pary],
ls=":",
color="grey",
alpha=0.5,
)
plt.xlabel("Index")
plt.ylabel("Amplitude");_____no_output_____
</code>
## PeVatrons in CTA ?
Now it's your turn to play with this MCMC notebook, for example to test the CTA performance for measuring a cutoff at very high energies (100 TeV?).
After defining your `SkyModel` it can be as simple as this:_____no_output_____
<code>
# dataset = simulate_dataset(model, geom, pointing, irfs)
# sampler = run_mcmc(dataset)
# plot_trace(sampler, dataset)
# plot_corner(sampler, dataset, nburn=200)_____no_output_____
</code>
| {
"repository": "JohannesBuchner/gammapy",
"path": "docs/tutorials/mcmc_sampling.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 1,
"size": 15441,
"hexsha": "d0a9fb44f7763f12a5780ff9c174dd1b04a8c333",
"max_line_length": 554,
"avg_line_length": 34.0860927152,
"alphanum_fraction": 0.5964639596
} |
# Notebook from ekdnam/NLP-Summit-Hackathon
Path: EDA/EDA-1.ipynb
# Exploratory Data Analysis_____no_output_____## Importing the necessary libraries_____no_output_____
<code>
import pandas as pd
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import re
import nltk
print(pd.__version__)
print(np.__version__)
print(nltk.__version__)
print(matplotlib.__version__)1.0.5
1.16.6
3.5
3.2.2
path_to_input = "../input/"_____no_output_____# drop first column containing sr nos
df = pd.read_csv(path_to_input + "mtsamples.csv", index_col = 0)
print(df) description \
0 A 23-year-old white female presents with comp...
1 Consult for laparoscopic gastric bypass.
2 Consult for laparoscopic gastric bypass.
3 2-D M-Mode. Doppler.
4 2-D Echocardiogram
... ...
4994 Patient having severe sinusitis about two to ...
4995 This is a 14-month-old baby boy Caucasian who...
4996 A female for a complete physical and follow u...
4997 Mother states he has been wheezing and coughing.
4998 Acute allergic reaction, etiology uncertain, ...
medical_specialty sample_name \
0 Allergy / Immunology Allergic Rhinitis
1 Bariatrics Laparoscopic Gastric Bypass Consult - 2
2 Bariatrics Laparoscopic Gastric Bypass Consult - 1
3 Cardiovascular / Pulmonary 2-D Echocardiogram - 1
4 Cardiovascular / Pulmonary 2-D Echocardiogram - 2
... ... ...
4994 Allergy / Immunology Chronic Sinusitis
4995 Allergy / Immunology Kawasaki Disease - Discharge Summary
4996 Allergy / Immunology Followup on Asthma
4997 Allergy / Immunology Asthma in a 5-year-old
4998 Allergy / Immunology Allergy Evaluation Consult
transcription \
0 SUBJECTIVE:, This 23-year-old white female pr...
1 PAST MEDICAL HISTORY:, He has difficulty climb...
2 HISTORY OF PRESENT ILLNESS: , I have seen ABC ...
3 2-D M-MODE: , ,1. Left atrial enlargement wit...
4 1. The left ventricular cavity size and wall ...
... ...
4994 HISTORY:, I had the pleasure of meeting and e...
4995 ADMITTING DIAGNOSIS: , Kawasaki disease.,DISCH...
4996 SUBJECTIVE: , This is a 42-year-old white fema...
4997 CHIEF COMPLAINT: , This 5-year-old male presen...
4998 HISTORY: , A 34-year-old male presents today s...
keywords
0 allergy / immunology, allergic rhinitis, aller...
1 bariatrics, laparoscopic gastric bypass, weigh...
2 bariatrics, laparoscopic gastric bypass, heart...
3 cardiovascular / pulmonary, 2-d m-mode, dopple...
4 cardiovascular / pulmonary, 2-d, doppler, echo...
... ...
4994 NaN
4995 allergy / immunology, mucous membranes, conjun...
4996 NaN
4997 NaN
4998 NaN
[4999 rows x 5 columns]
print(df.shape)
df.head()(4999, 5)
</code>
There are 4999 records in the dataset_____no_output_____
<code>
df_v1 = df[['medical_specialty', 'sample_name', 'transcription', 'keywords']]_____no_output_____df_v1.head()_____no_output_____print(df_v1['medical_specialty'].value_counts())
print("Unique records in medical_specialty :" + str(df_v1['medical_specialty'].value_counts().size)) Surgery 1103
Consult - History and Phy. 516
Cardiovascular / Pulmonary 372
Orthopedic 355
Radiology 273
General Medicine 259
Gastroenterology 230
Neurology 223
SOAP / Chart / Progress Notes 166
Obstetrics / Gynecology 160
Urology 158
Discharge Summary 108
ENT - Otolaryngology 98
Neurosurgery 94
Hematology - Oncology 90
Ophthalmology 83
Nephrology 81
Emergency Room Reports 75
Pediatrics - Neonatal 70
Pain Management 62
Psychiatry / Psychology 53
Office Notes 51
Podiatry 47
Dermatology 29
Cosmetic / Plastic Surgery 27
Dentistry 27
Letters 23
Physical Medicine - Rehab 21
Sleep Medicine 20
Endocrinology 19
Bariatrics 18
IME-QME-Work Comp etc. 16
Chiropractic 14
Diets and Nutritions 10
Rheumatology 10
Speech - Language 9
Autopsy 8
Lab Medicine - Pathology 8
Allergy / Immunology 7
Hospice - Palliative Care 6
Name: medical_specialty, dtype: int64
Unique records in medical_specialty :40
df_v1['sample_name'].value_counts()_____no_output_____
</code>
Dropping rows having NaN values_____no_output_____
<code>
df_v1 = df_v1.dropna()
print(df_v1.shape[0])3898
</code>
Thus we can see that the size of the dataset has decreased by 1101 rows (from 4999 to 3898)._____no_output_____
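As a quick optional check (not part of the original analysis), we can look at how many missing values each column contains, which accounts for the dropped rows:

<code>
# Count NaN values per column in the original dataframe
print(df.isna().sum())
</code>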
| {
"repository": "ekdnam/NLP-Summit-Hackathon",
"path": "EDA/EDA-1.ipynb",
"matched_keywords": [
"immunology"
],
"stars": null,
"size": 20554,
"hexsha": "d0aa5ed3c2fe7d3bb1087c2b25fcbf4a2b48e9f0",
"max_line_length": 108,
"avg_line_length": 36.4432624113,
"alphanum_fraction": 0.4303298628
} |
# Notebook from jingxlim/tutmom
Path: mystic.ipynb
# Optimization with `mystic`_____no_output_____
<code>
%matplotlib notebook_____no_output_____
</code>
`mystic` approximates the `scipy.optimize` interface_____no_output_____
<code>
"""
Example:
- Minimize Rosenbrock's Function with Nelder-Mead.
- Plot of parameter convergence to function minimum.
Demonstrates:
- standard models
- minimal solver interface
- parameter trajectories using retall
"""
# Nelder-Mead solver
from mystic.solvers import fmin
# Rosenbrock function
from mystic.models import rosen
# tools
import pylab
if __name__ == '__main__':
# initial guess
x0 = [0.8,1.2,0.7]
# use Nelder-Mead to minimize the Rosenbrock function
solution = fmin(rosen, x0, disp=0, retall=1)
allvecs = solution[-1]
# plot the parameter trajectories
pylab.plot([i[0] for i in allvecs])
pylab.plot([i[1] for i in allvecs])
pylab.plot([i[2] for i in allvecs])
# draw the plot
pylab.title("Rosenbrock parameter convergence")
pylab.xlabel("Nelder-Mead solver iterations")
pylab.ylabel("parameter value")
pylab.legend(["x", "y", "z"])
pylab.show()_____no_output_____
</code>
Diagnostic tools_____no_output_____* Callbacks_____no_output_____
<code>
"""
Example:
- Minimize Rosenbrock's Function with Nelder-Mead.
- Dynamic plot of parameter convergence to function minimum.
Demonstrates:
- standard models
- minimal solver interface
- parameter trajectories using callback
- solver interactivity
"""
# Nelder-Mead solver
from mystic.solvers import fmin
# Rosenbrock function
from mystic.models import rosen
# tools
from mystic.tools import getch
import pylab
pylab.ion()
# draw the plot
def plot_frame():
pylab.title("Rosenbrock parameter convergence")
pylab.xlabel("Nelder-Mead solver iterations")
pylab.ylabel("parameter value")
pylab.draw()
return
iter = 0
step, xval, yval, zval = [], [], [], []
# plot the parameter trajectories
def plot_params(params):
global iter, step, xval, yval, zval
step.append(iter)
xval.append(params[0])
yval.append(params[1])
zval.append(params[2])
pylab.plot(step,xval,'b-')
pylab.plot(step,yval,'g-')
pylab.plot(step,zval,'r-')
pylab.legend(["x", "y", "z"])
pylab.draw()
iter += 1
return
if __name__ == '__main__':
# initial guess
x0 = [0.8,1.2,0.7]
# suggest that the user interacts with the solver
print("NOTE: while solver is running, press 'Ctrl-C' in console window")
getch()
plot_frame()
# use Nelder-Mead to minimize the Rosenbrock function
solution = fmin(rosen, x0, disp=1, callback=plot_params, handler=True)
print(solution)
# don't exit until user is ready
getch()
NOTE: while solver is running, press 'Ctrl-C' in console window
Press any key to continue and press enter
Optimization terminated successfully.
Current function value: 0.000000
Iterations: 120
Function evaluations: 215
[ 1.00000328 1.00000565 1.00001091]
Press any key to continue and press enter
</code>
**NOTE** IPython does not handle shell prompt interactive programs well, so the above should be run from a command prompt. An IPython-safe version is below._____no_output_____
<code>
"""
Example:
- Minimize Rosenbrock's Function with Powell's method.
- Dynamic print of parameter convergence to function minimum.
Demonstrates:
- standard models
- minimal solver interface
- parameter trajectories using callback
"""
# Powell's Directonal solver
from mystic.solvers import fmin_powell
# Rosenbrock function
from mystic.models import rosen
iter = 0
# plot the parameter trajectories
def print_params(params):
global iter
from numpy import asarray
print("Generation %d has best fit parameters: %s" % (iter,asarray(params)))
iter += 1
return
if __name__ == '__main__':
# initial guess
x0 = [0.8,1.2,0.7]
print_params(x0)
# use Powell's method to minimize the Rosenbrock function
solution = fmin_powell(rosen, x0, disp=1, callback=print_params, handler=False)
print(solution)Generation 0 has best fit parameters: [ 0.8 1.2 0.7]
Generation 1 has best fit parameters: [ 0.8 1.2 0.7]
Generation 2 has best fit parameters: [ 1.096641 0.92316246 0.85222892]
Generation 3 has best fit parameters: [ 0.96098383 0.92341029 0.85268657]
Generation 4 has best fit parameters: [ 0.96116068 0.92362873 0.85268597]
Generation 5 has best fit parameters: [ 0.96139941 0.92394456 0.85319715]
Generation 6 has best fit parameters: [ 0.96490397 0.9293998 0.86287626]
Generation 7 has best fit parameters: [ 0.97283782 0.9438172 0.8900223 ]
Generation 8 has best fit parameters: [ 0.99282304 0.98392465 0.9676975 ]
Generation 9 has best fit parameters: [ 0.99599362 0.99123752 0.98220233]
Generation 10 has best fit parameters: [ 0.99933371 0.99875944 0.9973022 ]
Generation 11 has best fit parameters: [ 0.99959358 0.99924252 0.99835369]
Generation 12 has best fit parameters: [ 1.00000002 1.00000006 1.00000011]
Generation 13 has best fit parameters: [ 1. 1. 1.]
Generation 14 has best fit parameters: [ 1. 1. 1.]
Optimization terminated successfully.
Current function value: 0.000000
Iterations: 13
Function evaluations: 524
[ 1. 1. 1.]
</code>
* Monitors_____no_output_____
<code>
"""
Example:
- Minimize Rosenbrock's Function with Powell's method.
Demonstrates:
- standard models
- minimal solver interface
- customized monitors
"""
# Powell's Directonal solver
from mystic.solvers import fmin_powell
# Rosenbrock function
from mystic.models import rosen
# tools
from mystic.monitors import VerboseLoggingMonitor
if __name__ == '__main__':
print("Powell's Method")
print("===============")
# initial guess
x0 = [1.5, 1.5, 0.7]
# configure monitor
stepmon = VerboseLoggingMonitor(1,1)
# use Powell's method to minimize the Rosenbrock function
solution = fmin_powell(rosen, x0, itermon=stepmon)
print(solution)Powell's Method
===============
Generation 0 has Chi-Squared: 297.000000
Generation 1 has Chi-Squared: 26.522040
Generation 2 has Chi-Squared: 0.002383
Generation 3 has Chi-Squared: 0.002378
Generation 4 has Chi-Squared: 0.001940
Generation 5 has Chi-Squared: 0.001141
Generation 6 has Chi-Squared: 0.000769
Generation 7 has Chi-Squared: 0.000125
Generation 8 has Chi-Squared: 0.000042
Generation 9 has Chi-Squared: 0.000000
Generation 10 has Chi-Squared: 0.000000
Generation 11 has Chi-Squared: 0.000000
Generation 12 has Chi-Squared: 0.000000
Generation 13 has Chi-Squared: 0.000000
Generation 14 has Chi-Squared: 0.000000
Optimization terminated successfully.
Current function value: 0.000000
Iterations: 14
Function evaluations: 529
STOP("NormalizedChangeOverGeneration with {'tolerance': 0.0001, 'generations': 2}")
[ 1. 1. 1.]
import mystic
mystic.log_reader('log.txt')_____no_output_____
</code>
* Solution trajectory and model plotting_____no_output_____
<code>
import mystic
mystic.model_plotter(mystic.models.rosen, 'log.txt', kwds='-d -x 1 -b "-2:2:.1, -2:2:.1, 1"')_____no_output_____
</code>
Solver "tuning" and extension_____no_output_____* Solver class interface_____no_output_____
<code>
"""
Example:
- Solve 8th-order Chebyshev polynomial coefficients with DE.
- Callable plot of fitting to Chebyshev polynomial.
- Monitor Chi-Squared for Chebyshev polynomial.
Demonstrates:
- standard models
- expanded solver interface
- built-in random initial guess
- customized monitors and termination conditions
- customized DE mutation strategies
- use of monitor to retrieve results information
"""
# Differential Evolution solver
from mystic.solvers import DifferentialEvolutionSolver2
# Chebyshev polynomial and cost function
from mystic.models.poly import chebyshev8, chebyshev8cost
from mystic.models.poly import chebyshev8coeffs
# tools
from mystic.termination import VTR
from mystic.strategy import Best1Exp
from mystic.monitors import VerboseMonitor
from mystic.tools import getch, random_seed
from mystic.math import poly1d
import pylab
pylab.ion()
# draw the plot
def plot_exact():
pylab.title("fitting 8th-order Chebyshev polynomial coefficients")
pylab.xlabel("x")
pylab.ylabel("f(x)")
import numpy
x = numpy.arange(-1.2, 1.2001, 0.01)
exact = chebyshev8(x)
pylab.plot(x,exact,'b-')
pylab.legend(["Exact"])
pylab.axis([-1.4,1.4,-2,8],'k-')
pylab.draw()
return
# plot the polynomial
def plot_solution(params,style='y-'):
import numpy
x = numpy.arange(-1.2, 1.2001, 0.01)
f = poly1d(params)
y = f(x)
pylab.plot(x,y,style)
pylab.legend(["Exact","Fitted"])
pylab.axis([-1.4,1.4,-2,8],'k-')
pylab.draw()
return
if __name__ == '__main__':
print("Differential Evolution")
print("======================")
# set range for random initial guess
ndim = 9
x0 = [(-100,100)]*ndim
random_seed(123)
# draw frame and exact coefficients
plot_exact()
# configure monitor
stepmon = VerboseMonitor(50)
# use DE to solve 8th-order Chebyshev coefficients
npop = 10*ndim
solver = DifferentialEvolutionSolver2(ndim,npop)
solver.SetRandomInitialPoints(min=[-100]*ndim, max=[100]*ndim)
solver.SetGenerationMonitor(stepmon)
solver.enable_signal_handler()
solver.Solve(chebyshev8cost, termination=VTR(0.01), strategy=Best1Exp, \
CrossProbability=1.0, ScalingFactor=0.9, \
sigint_callback=plot_solution)
solution = solver.Solution()
# use monitor to retrieve results information
iterations = len(stepmon)
cost = stepmon.y[-1]
print("Generation %d has best Chi-Squared: %f" % (iterations, cost))
# use pretty print for polynomials
print(poly1d(solution))
# compare solution with actual 8th-order Chebyshev coefficients
print("\nActual Coefficients:\n %s\n" % poly1d(chebyshev8coeffs))
# plot solution versus exact coefficients
plot_solution(solution)
Differential Evolution
======================
Generation 0 has Chi-Squared: 76214.552493
Generation 50 has Chi-Squared: 5823.804953
Generation 100 has Chi-Squared: 912.555273
Generation 150 has Chi-Squared: 73.593950
Generation 200 has Chi-Squared: 10.411967
Generation 250 has Chi-Squared: 0.350054
Generation 300 has Chi-Squared: 0.010559
STOP("VTR with {'tolerance': 0.01, 'target': 0.0}")
Generation 312 has best Chi-Squared: 0.008604
8 7 6 5 4 3 2
127.9 x - 0.3241 x - 254.6 x + 0.7937 x + 157.8 x - 0.6282 x - 30.99 x + 0.1701 x + 0.9503
Actual Coefficients:
8 6 4 2
128 x - 256 x + 160 x - 32 x + 1
from mystic.solvers import DifferentialEvolutionSolver
print("\n".join([i for i in dir(DifferentialEvolutionSolver) if not i.startswith('_')]))Collapse
Collapsed
Finalize
SaveSolver
SetConstraints
SetEvaluationLimits
SetEvaluationMonitor
SetGenerationMonitor
SetInitialPoints
SetMultinormalInitialPoints
SetObjective
SetPenalty
SetRandomInitialPoints
SetReducer
SetSampledInitialPoints
SetSaveFrequency
SetStrictRanges
SetTermination
Solution
Solve
Step
Terminated
UpdateGenealogyRecords
bestEnergy
bestSolution
disable_signal_handler
enable_signal_handler
energy_history
evaluations
generations
solution_history
</code>
* Algorithm configurability_____no_output_____* Termination conditions_____no_output_____
<code>
from mystic.termination import VTR, ChangeOverGeneration, And, Or
stop = Or(And(VTR(), ChangeOverGeneration()), VTR(1e-8))
from mystic.models import rosen
from mystic.monitors import VerboseMonitor
from mystic.solvers import DifferentialEvolutionSolver
solver = DifferentialEvolutionSolver(3,40)
solver.SetRandomInitialPoints([-10,-10,-10],[10,10,10])
solver.SetGenerationMonitor(VerboseMonitor(10))
solver.SetTermination(stop)
solver.SetObjective(rosen)
solver.SetStrictRanges([-10,-10,-10],[10,10,10])
solver.SetEvaluationLimits(generations=600)
solver.Solve()
print(solver.bestSolution)Generation 0 has Chi-Squared: 587.458970
Generation 10 has Chi-Squared: 2.216492
Generation 20 has Chi-Squared: 1.626018
Generation 30 has Chi-Squared: 0.229984
Generation 40 has Chi-Squared: 0.229984
Generation 50 has Chi-Squared: 0.008647
Generation 60 has Chi-Squared: 0.000946
Generation 70 has Chi-Squared: 0.000109
Generation 80 has Chi-Squared: 0.000002
Generation 90 has Chi-Squared: 0.000000
STOP("VTR with {'tolerance': 1e-08, 'target': 0.0}")
[ 1.00001435 1.0000254 1.0000495 ]
</code>
* Solver population_____no_output_____
<code>
from mystic.solvers import DifferentialEvolutionSolver
from mystic.math import Distribution
import numpy as np
import pylab
# build a mystic distribution instance
dist = Distribution(np.random.normal, 5, 1)
# use the distribution instance as the initial population
solver = DifferentialEvolutionSolver(3,20)
solver.SetSampledInitialPoints(dist)
# visualize the initial population
pylab.hist(np.array(solver.population).ravel())
pylab.show()_____no_output_____
</code>
**EXERCISE:** Use `mystic` to find the minimum for the `peaks` test function, with the bound specified by the `mystic.models.peaks` documentation._____no_output_____**EXERCISE:** Use `mystic` to do a fit to the noisy data in the `scipy.optimize.curve_fit` example (the least squares fit)._____no_output_____Constraints "operators" (i.e. kernel transformations)
PENALTY: $\psi(x) = f(x) + k*p(x)$
CONSTRAINT: $\psi(x) = f(c(x)) = f(x')$_____no_output_____
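A tiny illustration of the two couplings (a sketch using only interfaces demonstrated elsewhere in this notebook; the objective and target values are arbitrary):

<code>
from mystic.solvers import fmin
from mystic.penalty import quadratic_equality
from mystic.constraints import with_mean

def cost(x):
    return (x[0] - 1)**2 + (x[1] + 1)**2

# PENALTY: psi(x) = f(x) + k*p(x); softly discourage violations of sum(x) == 3
@quadratic_equality(condition=lambda x: sum(x) - 3.0)
def penalty(x):
    return 0.0

# CONSTRAINT: psi(x) = f(c(x)); c maps x onto mean(x) == 1.5 (i.e. sum(x) == 3 for 2 parameters)
@with_mean(1.5)
def constraint(x):
    return x

print(fmin(cost, [0., 0.], penalty=penalty, disp=False))
print(fmin(cost, [0., 0.], constraints=constraint, disp=False))
</code>

The penalty leaves the search space untouched and discourages violations through the objective, while the constraint maps every candidate onto the feasible set before the objective is evaluated.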
<code>
from mystic.constraints import *
from mystic.penalty import quadratic_equality
from mystic.coupler import inner
from mystic.math import almostEqual
from mystic.tools import random_seed
random_seed(213)
def test_penalize():
from mystic.math.measures import mean, spread
def mean_constraint(x, target):
return mean(x) - target
def range_constraint(x, target):
return spread(x) - target
@quadratic_equality(condition=range_constraint, kwds={'target':5.0})
@quadratic_equality(condition=mean_constraint, kwds={'target':5.0})
def penalty(x):
return 0.0
def cost(x):
return abs(sum(x) - 5.0)
from mystic.solvers import fmin
from numpy import array
x = array([1,2,3,4,5])
y = fmin(cost, x, penalty=penalty, disp=False)
assert round(mean(y)) == 5.0
assert round(spread(y)) == 5.0
assert round(cost(y)) == 4*(5.0)
def test_solve():
from mystic.math.measures import mean
def mean_constraint(x, target):
return mean(x) - target
def parameter_constraint(x):
return x[-1] - x[0]
@quadratic_equality(condition=mean_constraint, kwds={'target':5.0})
@quadratic_equality(condition=parameter_constraint)
def penalty(x):
return 0.0
x = solve(penalty, guess=[2,3,1])
assert round(mean_constraint(x, 5.0)) == 0.0
assert round(parameter_constraint(x)) == 0.0
assert issolution(penalty, x)
def test_solve_constraint():
from mystic.math.measures import mean
@with_mean(1.0)
def constraint(x):
x[-1] = x[0]
return x
x = solve(constraint, guess=[2,3,1])
assert almostEqual(mean(x), 1.0, tol=1e-15)
assert x[-1] == x[0]
assert issolution(constraint, x)
def test_as_constraint():
from mystic.math.measures import mean, spread
def mean_constraint(x, target):
return mean(x) - target
def range_constraint(x, target):
return spread(x) - target
@quadratic_equality(condition=range_constraint, kwds={'target':5.0})
@quadratic_equality(condition=mean_constraint, kwds={'target':5.0})
def penalty(x):
return 0.0
ndim = 3
constraints = as_constraint(penalty, solver='fmin')
#XXX: this is expensive to evaluate, as there are nested optimizations
from numpy import arange
x = arange(ndim)
_x = constraints(x)
assert round(mean(_x)) == 5.0
assert round(spread(_x)) == 5.0
assert round(penalty(_x)) == 0.0
def cost(x):
return abs(sum(x) - 5.0)
npop = ndim*3
from mystic.solvers import diffev
y = diffev(cost, x, npop, constraints=constraints, disp=False, gtol=10)
assert round(mean(y)) == 5.0
assert round(spread(y)) == 5.0
assert round(cost(y)) == 5.0*(ndim-1)
def test_as_penalty():
from mystic.math.measures import mean, spread
@with_spread(5.0)
@with_mean(5.0)
def constraint(x):
return x
penalty = as_penalty(constraint)
from numpy import array
x = array([1,2,3,4,5])
def cost(x):
return abs(sum(x) - 5.0)
from mystic.solvers import fmin
y = fmin(cost, x, penalty=penalty, disp=False)
assert round(mean(y)) == 5.0
assert round(spread(y)) == 5.0
assert round(cost(y)) == 4*(5.0)
def test_with_penalty():
from mystic.math.measures import mean, spread
@with_penalty(quadratic_equality, kwds={'target':5.0})
def penalty(x, target):
return mean(x) - target
def cost(x):
return abs(sum(x) - 5.0)
from mystic.solvers import fmin
from numpy import array
x = array([1,2,3,4,5])
y = fmin(cost, x, penalty=penalty, disp=False)
assert round(mean(y)) == 5.0
assert round(cost(y)) == 4*(5.0)
def test_with_mean():
from mystic.math.measures import mean, impose_mean
@with_mean(5.0)
def mean_of_squared(x):
return [i**2 for i in x]
from numpy import array
x = array([1,2,3,4,5])
y = impose_mean(5, [i**2 for i in x])
assert mean(y) == 5.0
assert mean_of_squared(x) == y
def test_with_mean_spread():
from mystic.math.measures import mean, spread, impose_mean, impose_spread
@with_spread(50.0)
@with_mean(5.0)
def constrained_squared(x):
return [i**2 for i in x]
from numpy import array
x = array([1,2,3,4,5])
y = impose_spread(50.0, impose_mean(5.0,[i**2 for i in x]))
assert almostEqual(mean(y), 5.0, tol=1e-15)
assert almostEqual(spread(y), 50.0, tol=1e-15)
assert constrained_squared(x) == y
def test_constrained_solve():
from mystic.math.measures import mean, spread
@with_spread(5.0)
@with_mean(5.0)
def constraints(x):
return x
def cost(x):
return abs(sum(x) - 5.0)
from mystic.solvers import fmin_powell
from numpy import array
x = array([1,2,3,4,5])
y = fmin_powell(cost, x, constraints=constraints, disp=False)
assert almostEqual(mean(y), 5.0, tol=1e-15)
assert almostEqual(spread(y), 5.0, tol=1e-15)
assert almostEqual(cost(y), 4*(5.0), tol=1e-6)
if __name__ == '__main__':
test_penalize()
test_solve()
test_solve_constraint()
test_as_constraint()
test_as_penalty()
test_with_penalty()
test_with_mean()
test_with_mean_spread()
test_constrained_solve()_____no_output_____from mystic.coupler import and_, or_, not_
from mystic.constraints import and_ as _and, or_ as _or, not_ as _not
if __name__ == '__main__':
import numpy as np
from mystic.penalty import linear_equality, quadratic_equality
from mystic.constraints import as_constraint
x = x1,x2,x3 = (5., 5., 1.)
f = f1,f2,f3 = (np.sum, np.prod, np.average)
k = 100
solver = 'fmin_powell' #'diffev'
ptype = quadratic_equality
# case #1: couple penalties into a single constraint
p1 = lambda x: abs(x1 - f1(x))
p2 = lambda x: abs(x2 - f2(x))
p3 = lambda x: abs(x3 - f3(x))
p = (p1,p2,p3)
p = [ptype(pi)(lambda x:0.) for pi in p]
penalty = and_(*p, k=k)
constraint = as_constraint(penalty, solver=solver)
x = [1,2,3,4,5]
x_ = constraint(x)
assert round(f1(x_)) == round(x1)
assert round(f2(x_)) == round(x2)
assert round(f3(x_)) == round(x3)
# case #2: couple constraints into a single constraint
from mystic.math.measures import impose_product, impose_sum, impose_mean
from mystic.constraints import as_penalty
from mystic import random_seed
random_seed(123)
t = t1,t2,t3 = (impose_sum, impose_product, impose_mean)
c1 = lambda x: t1(x1, x)
c2 = lambda x: t2(x2, x)
c3 = lambda x: t3(x3, x)
c = (c1,c2,c3)
k=1
solver = 'buckshot' #'diffev'
ptype = linear_equality #quadratic_equality
p = [as_penalty(ci, ptype) for ci in c]
penalty = and_(*p, k=k)
constraint = as_constraint(penalty, solver=solver)
x = [1,2,3,4,5]
x_ = constraint(x)
assert round(f1(x_)) == round(x1)
assert round(f2(x_)) == round(x2)
assert round(f3(x_)) == round(x3)
# etc: more coupling of constraints
from mystic.constraints import with_mean, discrete
@with_mean(5.0)
def meanie(x):
return x
@discrete(list(range(11)))
def integers(x):
return x
c = _and(integers, meanie)
x = c([1,2,3])
assert x == integers(x) == meanie(x)
x = c([9,2,3])
assert x == integers(x) == meanie(x)
x = c([0,-2,3])
assert x == integers(x) == meanie(x)
x = c([9,-200,344])
assert x == integers(x) == meanie(x)
c = _or(meanie, integers)
x = c([1.1234, 4.23412, -9])
assert x == meanie(x) and x != integers(x)
x = c([7.0, 10.0, 0.0])
assert x == integers(x) and x != meanie(x)
x = c([6.0, 9.0, 0.0])
assert x == integers(x) == meanie(x)
x = c([3,4,5])
assert x == integers(x) and x != meanie(x)
x = c([3,4,5.5])
assert x == meanie(x) and x != integers(x)
c = _not(integers)
x = c([1,2,3])
assert x != integers(x) and x != [1,2,3] and x == c(x)
x = c([1.1,2,3])
assert x != integers(x) and x == [1.1,2,3] and x == c(x)
c = _not(meanie)
x = c([1,2,3])
assert x != meanie(x) and x == [1,2,3] and x == c(x)
x = c([4,5,6])
assert x != meanie(x) and x != [4,5,6] and x == c(x)
c = _not(_and(meanie, integers))
x = c([4,5,6])
assert x != meanie(x) and x != integers(x) and x != [4,5,6] and x == c(x)
# etc: more coupling of penalties
from mystic.penalty import quadratic_inequality
p1 = lambda x: sum(x) - 5
p2 = lambda x: min(i**2 for i in x)
p = p1,p2
p = [quadratic_inequality(pi)(lambda x:0.) for pi in p]
p1,p2 = p
penalty = and_(*p)
x = [[1,2],[-2,-1],[5,-5]]
for xi in x:
assert p1(xi) + p2(xi) == penalty(xi)
penalty = or_(*p)
for xi in x:
assert min(p1(xi),p2(xi)) == penalty(xi)
penalty = not_(p1)
for xi in x:
assert bool(p1(xi)) != bool(penalty(xi))
penalty = not_(p2)
for xi in x:
assert bool(p2(xi)) != bool(penalty(xi))
_____no_output_____
</code>
In addition to being able to generically apply information as a penalty, `mystic` provides the ability to construct constraints "operators" -- essentially applying kernel transformations that reduce optimizer search space to the space of solutions that satisfy the constraints. This can greatly accelerate convergence to a solution, as the space that the optimizer can explore is restricted._____no_output_____
<code>
"""
Example:
- Minimize Rosenbrock's Function with Powell's method.
Demonstrates:
- standard models
- minimal solver interface
- parameter constraints solver and constraints factory decorator
- statistical parameter constraints
- customized monitors
"""
# Powell's Directional solver
from mystic.solvers import fmin_powell
# Rosenbrock function
from mystic.models import rosen
# tools
from mystic.monitors import VerboseMonitor
from mystic.math.measures import mean, impose_mean
if __name__ == '__main__':
print("Powell's Method")
print("===============")
# initial guess
x0 = [0.8,1.2,0.7]
# use the mean constraints factory decorator
from mystic.constraints import with_mean
# define constraints function
@with_mean(1.0)
def constraints(x):
# constrain the last x_i to be the same value as the first x_i
x[-1] = x[0]
return x
# configure monitor
stepmon = VerboseMonitor(1)
# use Powell's method to minimize the Rosenbrock function
solution = fmin_powell(rosen, x0, constraints=constraints, itermon=stepmon)
print(solution)
Powell's Method
===============
Generation 0 has Chi-Squared: 81.100247
Generation 1 has Chi-Squared: 0.000000
Generation 2 has Chi-Squared: 0.000000
Optimization terminated successfully.
Current function value: 0.000000
Iterations: 2
Function evaluations: 81
STOP("NormalizedChangeOverGeneration with {'tolerance': 0.0001, 'generations': 2}")
[ 1. 1. 1.]
</code>
* Range (i.e. 'box') constraints_____no_output_____Use `solver.SetStrictRange`, or the `bounds` keyword on the solver function interface._____no_output_____* Symbolic constraints interface_____no_output_____
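Before moving on to the symbolic interface, here is a minimal sketch of the range ('box') constraints mentioned above, using the `bounds` keyword on the solver function interface (the bounds and starting point are assumed values for illustration, not taken from the tutorial):_____no_output_____
<code>
# minimal sketch (assumed values): box constraints via the `bounds` keyword
from mystic.solvers import fmin_powell
from mystic.models import rosen

bounds = [(0., 2.)] * 3   # one (lower, upper) pair per parameter
x0 = [0.8, 1.2, 0.7]      # starting guess inside the box
result = fmin_powell(rosen, x0=x0, bounds=bounds, disp=False, full_output=True)
solution = result[0]      # each entry of the solution stays within [0, 2]_____no_output_____
</code>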
<code>
%%file spring.py
"a Tension-Compression String"
def objective(x):
x0,x1,x2 = x
return x0**2 * x1 * (x2 + 2)
bounds = [(0,100)]*3
# with penalty='penalty' applied, solution is:
xs = [0.05168906, 0.35671773, 11.28896619]
ys = 0.01266523
from mystic.symbolic import generate_constraint, generate_solvers, solve
from mystic.symbolic import generate_penalty, generate_conditions
equations = """
1.0 - (x1**3 * x2)/(71785*x0**4) <= 0.0
(4*x1**2 - x0*x1)/(12566*x0**3 * (x1 - x0)) + 1./(5108*x0**2) - 1.0 <= 0.0
1.0 - 140.45*x0/(x2 * x1**2) <= 0.0
(x0 + x1)/1.5 - 1.0 <= 0.0
"""
pf = generate_penalty(generate_conditions(equations), k=1e12)
if __name__ == '__main__':
from mystic.solvers import diffev2
result = diffev2(objective, x0=bounds, bounds=bounds, penalty=pf, npop=40,
gtol=500, disp=True, full_output=True)
print(result[0])Writing spring.py
equations = """
1.0 - (x1**3 * x2)/(71785*x0**4) <= 0.0
(4*x1**2 - x0*x1)/(12566*x0**3 * (x1 - x0)) + 1./(5108*x0**2) - 1.0 <= 0.0
1.0 - 140.45*x0/(x2 * x1**2) <= 0.0
(x0 + x1)/1.5 - 1.0 <= 0.0
"""
from mystic.symbolic import generate_constraint, generate_solvers, solve
from mystic.symbolic import generate_penalty, generate_conditions
ineql, eql = generate_conditions(equations)
print("CONVERTED SYMBOLIC TO SINGLE CONSTRAINTS FUNCTIONS")
print(ineql)
print(eql)
print("\nTHE INDIVIDUAL INEQUALITIES")
for f in ineql:
print(f.__doc__)
print("\nGENERATED THE PENALTY FUNCTION FOR ALL CONSTRAINTS")
pf = generate_penalty((ineql, eql))
print(pf.__doc__)
x = [-0.1, 0.5, 11.0]
print("\nPENALTY FOR {}: {}".format(x, pf(x)))CONVERTED SYMBOLIC TO SINGLE CONSTRAINTS FUNCTIONS
(<function inequality_4606002448 at 0x111d18488>, <function inequality_4604414640 at 0x111d181e0>, <function inequality_4606003984 at 0x112733b70>, <function inequality_4605118704 at 0x1127331e0>)
()
THE INDIVIDUAL INEQUALITIES
1.0 - (x[1]**3 * x[2])/(71785*x[0]**4) - (0.0)
(4*x[1]**2 - x[0]*x[1])/(12566*x[0]**3 * (x[1] - x[0])) + 1./(5108*x[0]**2) - 1.0 - (0.0)
1.0 - 140.45*x[0]/(x[2] * x[1]**2) - (0.0)
(x[0] + x[1])/1.5 - 1.0 - (0.0)
GENERATED THE PENALTY FUNCTION FOR ALL CONSTRAINTS
quadratic_inequality: 1.0 - (x[1]**3 * x[2])/(71785*x[0]**4) - (0.0)
quadratic_inequality: (4*x[1]**2 - x[0]*x[1])/(12566*x[0]**3 * (x[1] - x[0])) + 1./(5108*x[0]**2) - 1.0 - (0.0)
quadratic_inequality: 1.0 - 140.45*x[0]/(x[2] * x[1]**2) - (0.0)
quadratic_inequality: (x[0] + x[1])/1.5 - 1.0 - (0.0)
PENALTY FOR [-0.1, 0.5, 11.0]: 7590.476190957014
</code>
* Penalty functions_____no_output_____
<code>
equations = """
1.0 - (x1**3 * x2)/(71785*x0**4) <= 0.0
(4*x1**2 - x0*x1)/(12566*x0**3 * (x1 - x0)) + 1./(5108*x0**2) - 1.0 <= 0.0
1.0 - 140.45*x0/(x2 * x1**2) <= 0.0
(x0 + x1)/1.5 - 1.0 <= 0.0
"""_____no_output_____"a Tension-Compression String"
from spring import objective, bounds, xs, ys
from mystic.penalty import quadratic_inequality
def penalty1(x): # <= 0.0
return 1.0 - (x[1]**3 * x[2])/(71785*x[0]**4)
def penalty2(x): # <= 0.0
return (4*x[1]**2 - x[0]*x[1])/(12566*x[0]**3 * (x[1] - x[0])) + 1./(5108*x[0]**2) - 1.0
def penalty3(x): # <= 0.0
return 1.0 - 140.45*x[0]/(x[2] * x[1]**2)
def penalty4(x): # <= 0.0
return (x[0] + x[1])/1.5 - 1.0
@quadratic_inequality(penalty1, k=1e12)
@quadratic_inequality(penalty2, k=1e12)
@quadratic_inequality(penalty3, k=1e12)
@quadratic_inequality(penalty4, k=1e12)
def penalty(x):
return 0.0
if __name__ == '__main__':
from mystic.solvers import diffev2
result = diffev2(objective, x0=bounds, bounds=bounds, penalty=penalty, npop=40,
gtol=500, disp=True, full_output=True)
print(result[0])Optimization terminated successfully.
Current function value: 0.012665
Iterations: 540
Function evaluations: 21640
[ 0.05168906 0.35671772 11.28896693]
</code>
* "Operators" that directly constrain search space_____no_output_____
<code>
"""
Crypto problem in Google CP Solver.
Prolog benchmark problem
'''
Name : crypto.pl
Original Source: P. Van Hentenryck's book
Adapted by : Daniel Diaz - INRIA France
Date : September 1992
'''
"""
def objective(x):
return 0.0
nletters = 26
bounds = [(1,nletters)]*nletters
# with penalty='penalty' applied, solution is:
# A B C D E F G H I J K L M N O P Q
xs = [ 5, 13, 9, 16, 20, 4, 24, 21, 25, 17, 23, 2, 8, 12, 10, 19, 7, \
# R S T U V W X Y Z
11, 15, 3, 1, 26, 6, 22, 14, 18]
ys = 0.0
# constraints
equations = """
B + A + L + L + E + T - 45 == 0
C + E + L + L + O - 43 == 0
C + O + N + C + E + R + T - 74 == 0
F + L + U + T + E - 30 == 0
F + U + G + U + E - 50 == 0
G + L + E + E - 66 == 0
J + A + Z + Z - 58 == 0
L + Y + R + E - 47 == 0
O + B + O + E - 53 == 0
O + P + E + R + A - 65 == 0
P + O + L + K + A - 59 == 0
Q + U + A + R + T + E + T - 50 == 0
S + A + X + O + P + H + O + N + E - 134 == 0
S + C + A + L + E - 51 == 0
S + O + L + O - 37 == 0
S + O + N + G - 61 == 0
S + O + P + R + A + N + O - 82 == 0
T + H + E + M + E - 72 == 0
V + I + O + L + I + N - 100 == 0
W + A + L + T + Z - 34 == 0
"""
var = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
# Let's say we know the vowels.
bounds[0] = (5,5) # A
bounds[4] = (20,20) # E
bounds[8] = (25,25) # I
bounds[14] = (10,10) # O
bounds[20] = (1,1) # U
from mystic.constraints import unique, near_integers, has_unique
from mystic.symbolic import generate_penalty, generate_conditions
pf = generate_penalty(generate_conditions(equations,var),k=1)
from mystic.penalty import quadratic_equality
@quadratic_equality(near_integers)
@quadratic_equality(has_unique)
def penalty(x):
return pf(x)
from numpy import round, hstack, clip
def constraint(x):
x = round(x).astype(int) # force round and convert type to int
x = clip(x, 1,nletters) #XXX: hack to impose bounds
x = unique(x, range(1,nletters+1))
return x
if __name__ == '__main__':
from mystic.solvers import diffev2
from mystic.monitors import Monitor, VerboseMonitor
mon = VerboseMonitor(50)
result = diffev2(objective, x0=bounds, bounds=bounds, penalty=pf,
constraints=constraint, npop=52, ftol=1e-8, gtol=1000,
disp=True, full_output=True, cross=0.1, scale=0.9, itermon=mon)
print(result[0])
Generation 0 has Chi-Squared: 1495.000000
Generation 50 has Chi-Squared: 469.000000
Generation 100 has Chi-Squared: 270.000000
Generation 150 has Chi-Squared: 142.000000
Generation 200 has Chi-Squared: 124.000000
Generation 250 has Chi-Squared: 106.000000
Generation 300 has Chi-Squared: 74.000000
Generation 350 has Chi-Squared: 61.000000
Generation 400 has Chi-Squared: 38.000000
Generation 450 has Chi-Squared: 32.000000
Generation 500 has Chi-Squared: 24.000000
Generation 550 has Chi-Squared: 24.000000
Generation 600 has Chi-Squared: 23.000000
Generation 650 has Chi-Squared: 23.000000
Generation 700 has Chi-Squared: 21.000000
Generation 750 has Chi-Squared: 21.000000
Generation 800 has Chi-Squared: 17.000000
Generation 850 has Chi-Squared: 13.000000
Generation 900 has Chi-Squared: 6.000000
Generation 950 has Chi-Squared: 6.000000
Generation 1000 has Chi-Squared: 6.000000
Generation 1050 has Chi-Squared: 6.000000
Generation 1100 has Chi-Squared: 6.000000
Generation 1150 has Chi-Squared: 6.000000
Generation 1200 has Chi-Squared: 6.000000
Generation 1250 has Chi-Squared: 6.000000
Generation 1300 has Chi-Squared: 6.000000
Generation 1350 has Chi-Squared: 6.000000
Generation 1400 has Chi-Squared: 6.000000
Generation 1450 has Chi-Squared: 1.000000
Generation 1500 has Chi-Squared: 1.000000
Generation 1550 has Chi-Squared: 1.000000
Generation 1600 has Chi-Squared: 1.000000
Generation 1650 has Chi-Squared: 1.000000
Generation 1700 has Chi-Squared: 1.000000
Generation 1750 has Chi-Squared: 1.000000
Generation 1800 has Chi-Squared: 1.000000
Generation 1850 has Chi-Squared: 1.000000
Generation 1900 has Chi-Squared: 1.000000
Generation 1950 has Chi-Squared: 1.000000
Generation 2000 has Chi-Squared: 1.000000
Generation 2050 has Chi-Squared: 1.000000
Generation 2100 has Chi-Squared: 1.000000
Generation 2150 has Chi-Squared: 1.000000
Generation 2200 has Chi-Squared: 1.000000
Generation 2250 has Chi-Squared: 1.000000
Generation 2300 has Chi-Squared: 1.000000
Generation 2350 has Chi-Squared: 1.000000
Generation 2400 has Chi-Squared: 1.000000
STOP("ChangeOverGeneration with {'tolerance': 1e-08, 'generations': 1000}")
Optimization terminated successfully.
Current function value: 1.000000
Iterations: 2428
Function evaluations: 126308
[ 5. 13. 9. 16. 20. 4. 24. 22. 25. 17. 23. 2. 7. 12. 10.
19. 8. 11. 15. 3. 1. 26. 6. 21. 14. 18.]
</code>
Special cases_____no_output_____* Integer and mixed integer programming_____no_output_____
<code>
"""
Eq 10 in Google CP Solver.
Standard benchmark problem.
"""
def objective(x):
return 0.0
bounds = [(0,10)]*7
# with penalty='penalty' applied, solution is:
xs = [6., 0., 8., 4., 9., 3., 9.]
ys = 0.0
# constraints
equations = """
98527*x0 + 34588*x1 + 5872*x2 + 59422*x4 + 65159*x6 - 1547604 - 30704*x3 - 29649*x5 == 0.0
98957*x1 + 83634*x2 + 69966*x3 + 62038*x4 + 37164*x5 + 85413*x6 - 1823553 - 93989*x0 == 0.0
900032 + 10949*x0 + 77761*x1 + 67052*x4 - 80197*x2 - 61944*x3 - 92964*x5 - 44550*x6 == 0.0
73947*x0 + 84391*x2 + 81310*x4 - 1164380 - 96253*x1 - 44247*x3 - 70582*x5 - 33054*x6 == 0.0
13057*x2 + 42253*x3 + 77527*x4 + 96552*x6 - 1185471 - 60152*x0 - 21103*x1 - 97932*x5 == 0.0
1394152 + 66920*x0 + 55679*x3 - 64234*x1 - 65337*x2 - 45581*x4 - 67707*x5 - 98038*x6 == 0.0
68550*x0 + 27886*x1 + 31716*x2 + 73597*x3 + 38835*x6 - 279091 - 88963*x4 - 76391*x5 == 0.0
76132*x1 + 71860*x2 + 22770*x3 + 68211*x4 + 78587*x5 - 480923 - 48224*x0 - 82817*x6 == 0.0
519878 + 94198*x1 + 87234*x2 + 37498*x3 - 71583*x0 - 25728*x4 - 25495*x5 - 70023*x6 == 0.0
361921 + 78693*x0 + 38592*x4 + 38478*x5 - 94129*x1 - 43188*x2 - 82528*x3 - 69025*x6 == 0.0
"""
from mystic.symbolic import generate_constraint, generate_solvers, solve
cf = generate_constraint(generate_solvers(solve(equations)))
if __name__ == '__main__':
from mystic.solvers import diffev2
result = diffev2(objective, x0=bounds, bounds=bounds, constraints=cf,
npop=4, gtol=1, disp=True, full_output=True)
print(result[0])Optimization terminated successfully.
Current function value: 0.000000
Iterations: 1
Function evaluations: 14
[ 6. 0. 8. 4. 9. 3. 9.]
</code>
**EXERCISE:** Solve the `chebyshev8.cost` example exactly, by applying the knowledge that the last term in the Chebyshev polynomial will always be one. Use `numpy.round` or `mystic.constraints.integers` to constrain solutions to the set of integers. Does using `mystic.suppressed` to suppress small numbers accelerate the solution?_____no_output_____**EXERCISE:** Replace the symbolic constraints in the following "Pressure Vessel Design" code with explicit penalty functions (i.e. use a compound penalty built with `mystic.penalty.quadratic_inequality`)._____no_output_____
<code>
"Pressure Vessel Design"
def objective(x):
x0,x1,x2,x3 = x
return 0.6224*x0*x2*x3 + 1.7781*x1*x2**2 + 3.1661*x0**2*x3 + 19.84*x0**2*x2
bounds = [(0,1e6)]*4
# with penalty='penalty' applied, solution is:
xs = [0.72759093, 0.35964857, 37.69901188, 240.0]
ys = 5804.3762083
from mystic.symbolic import generate_constraint, generate_solvers, solve
from mystic.symbolic import generate_penalty, generate_conditions
equations = """
-x0 + 0.0193*x2 <= 0.0
-x1 + 0.00954*x2 <= 0.0
-pi*x2**2*x3 - (4/3.)*pi*x2**3 + 1296000.0 <= 0.0
x3 - 240.0 <= 0.0
"""
pf = generate_penalty(generate_conditions(equations), k=1e12)
if __name__ == '__main__':
from mystic.solvers import diffev2
from mystic.math import almostEqual
result = diffev2(objective, x0=bounds, bounds=bounds, penalty=pf, npop=40, gtol=500,
disp=True, full_output=True)
print(result[0])
Optimization terminated successfully.
Current function value: 5804.376208
Iterations: 950
Function evaluations: 38040
[ 0.72759093 0.35964857 37.69901188 240. ]
</code>
* Linear and quadratic constraints_____no_output_____
<code>
"""
Minimize: f = 2*x[0] + 1*x[1]
Subject to: -1*x[0] + 1*x[1] <= 1
1*x[0] + 1*x[1] >= 2
1*x[1] >= 0
1*x[0] - 2*x[1] <= 4
where: -inf <= x[0] <= inf
"""
def objective(x):
x0,x1 = x
return 2*x0 + x1
equations = """
-x0 + x1 - 1.0 <= 0.0
-x0 - x1 + 2.0 <= 0.0
x0 - 2*x1 - 4.0 <= 0.0
"""
bounds = [(None, None),(0.0, None)]
# with penalty='penalty' applied, solution is:
xs = [0.5, 1.5]
ys = 2.5
from mystic.symbolic import generate_conditions, generate_penalty
pf = generate_penalty(generate_conditions(equations), k=1e3)
from mystic.symbolic import generate_constraint, generate_solvers, simplify
cf = generate_constraint(generate_solvers(simplify(equations)))
if __name__ == '__main__':
from mystic.solvers import fmin_powell
from mystic.math import almostEqual
result = fmin_powell(objective, x0=[0.0,0.0], bounds=bounds, constraint=cf,
penalty=pf, disp=True, full_output=True, gtol=3)
print(result[0])Optimization terminated successfully.
Current function value: 2.499688
Iterations: 6
Function evaluations: 277
[ 0.49974959 1.49987526]
</code>
**EXERCISE:** Solve the `cvxopt` "qp" example with `mystic`. Use symbolic constraints, penalty functions, or constraints operators. If you get it quickly, do all three methods._____no_output_____Let's look at how `mystic` gives an improved [solver workflow](workflow.ipynb)_____no_output_____
| {
"repository": "jingxlim/tutmom",
"path": "mystic.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 236,
"size": 215664,
"hexsha": "d0ab2e86a60160e8b1d227f0d6d5c7794193cc10",
"max_line_length": 72772,
"avg_line_length": 119.9466073415,
"alphanum_fraction": 0.8392128496
} |
# Notebook from phucnsp/blog
Path: _notebooks/draft/Graph.ipynb
# Graph
> in progress
- toc: true
- badges: true
- comments: true
- categories: [self-taught]
- image: images/bone.jpeg
- hide: true_____no_output_____https://towardsdatascience.com/using-graph-convolutional-neural-networks-on-structured-documents-for-information-extraction-c1088dcd2b8f
CNNs effectively capture patterns in data in Euclidean space
data is represented in the form of a Graph and lacks a grid-like regularity.
As Graphs can be irregular, they may have a variable number of un-ordered nodes and each node may have a different number of neighbors, making mathematical operations such as convolutions difficult to apply to the Graph domain.
Some examples of such non-Euclidean data include:
- Protein-Protein Interaction Data where interactions between molecules are modeled as graphs
- Citation Networks where scientific papers are nodes and citations are uni- or bi-directional edges
- Social Networks where people on the network are nodes and their relationships are edges
This article particularly discusses the use of Graph Convolutional Neural Networks (GCNs) on structured documents such as Invoices and Bills to automate the extraction of meaningful information by learning positional relationships between text entities.
What is a Graph?
**How to convert Structured Documents to Graphs?**
Such recurring structural information along with text attributes can help a Graph Neural Network learn neighborhood representations and perform node classification as a result
Geometric Algorithm: Connecting objects based on visibility
**Convolution on Document Graphs for Information Extraction**
_____no_output_____# References_____no_output_____https://towardsdatascience.com/overview-of-deep-learning-on-graph-embeddings-4305c10ad4a4
Graph embedding_____no_output_____https://towardsdatascience.com/graph-convolutional-networks-for-geometric-deep-learning-1faf17dee008m
Graph Conv_____no_output_____https://arxiv.org/pdf/1611.08097.pdf
https://arxiv.org/pdf/1901.00596.pdf_____no_output_____https://towardsdatascience.com/graph-theory-and-deep-learning-know-hows-6556b0e9891b
**Everything you need to know about Graph Theory for Deep Learning**
Graph Theory — crash course
1. What is a graph?
A graph, in the context of graph theory, is a structured datatype that has nodes (entities that hold information) and edges (connections between nodes that can also hold information). A graph is a way of structuring data, but can be a datapoint itself. Graphs are a type of Non-Euclidean data, which means they exist in 3D, unlike other datatypes like images, text, and audio.
- Graphs can have labels on their edges and/or nodes
- Labels can also be considered weights, but that’s up to the graph’s designer.
- Labels don’t have to be numerical, they can be textual.
- Labels don’t have to be unique;
- Graphs can have features (a.k.a attributes).
Take care not to mix up features and labels.
> Note: a node is a person, a node’s label is a person’s name, and the node’s features are the person’s characteristics.
- Graphs can be directed or undirected
- A node in the graph can even have an edge that points/connects to itself. This is known as a self-loop.
Graphs can be either:
- Heterogeneous — composed of different types of nodes
- Homogeneous — composed of the same type of nodes
and are either:
- Static — nodes and edges do not change, nothing is added or taken away
- Dynamic — nodes and edges change, added, deleted, moved, etc.
graphs can be vaguely described as either
- Dense — composed of many nodes and edges
- Sparse — composed of fewer nodes and edges
Graphs can be made to look neater by turning them into their planar form, which basically means rearranging nodes such that edges don’t intersect
2. Graph Analysis
3. E-graphs — graphs on computers
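A minimal sketch (not part of the original notes) of one way a small labeled, undirected graph can be stored on a computer: an adjacency list plus per-node features.

```
# toy graph: people as nodes, relationships as undirected edges
graph = {
    'nodes': {                      # node -> feature dict (the node's attributes)
        'alice': {'age': 34},
        'bob':   {'age': 29},
        'carol': {'age': 41},
    },
    'edges': {                      # adjacency list: node -> list of neighbours
        'alice': ['bob', 'carol'],
        'bob':   ['alice'],
        'carol': ['alice'],
    },
}

def degree(g, node):
    """Number of edges attached to a node."""
    return len(g['edges'][node])

assert degree(graph, 'alice') == 2
```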
_____no_output_____https://medium.com/@flawnsontong1/what-is-geometric-deep-learning-b2adb662d91d
**What is Geometric Deep Learning?**
The vast majority of deep learning is performed on Euclidean data. This includes datatypes in the 1-dimensional and 2-dimensional domain.
Images, text, audio, and many others are all euclidean data.
`Non-euclidean data` can represent more complex items and concepts with more accuracy than 1D or 2D representation:
When we represent things in a non-euclidean way, we are giving it an inductive bias.
An inductive bias allows a learning algorithm to prioritize one solution (or interpretation) over another, independent of the observed data. Inductive biases can express assumptions about either the data-generating process or the space of solutions.
In the majority of current research pursuits and literature, the inductive bias that is used is relational.
Building on this intuition, `Geometric Deep Learning (GDL)` is the niche field under the umbrella of deep learning that aims to build neural networks that can learn from non-euclidean data.
The prime example of a non-euclidean datatype is a graph. `Graphs` are a type of data structure that consists of `nodes` (entities) that are connected with `edges` (relationships). This abstract data structure can be used to model almost anything.
We want to be able to learn from graphs because:
`
Graphs allow us to represent individual features, while also providing information regarding relationships and structure.
`
`Graph theory` is the study of graphs and what we can learn from them. There are various types of graphs, each with a set of rules, properties, and possible actions.
Examples of Geometric Deep Learning
- Molecular Modeling and learning:
One of the bottlenecks in computational chemistry, biology, and physics is the representation concepts, entities, and interactions. Our current methods of representing these concepts computationally can be considered “lossy”, since we lose a lot of valuable information. By treating atoms as nodes, and bonds as edges, we can save structural information that can be used downstream in prediction or classification.
- 3D Modeling and Learning
_____no_output_____5 types of bias
https://twitter.com/math_rachel/status/1113203073051033600
https://arxiv.org/pdf/1806.01261.pdf
https://stackoverflow.com/questions/35655267/what-is-inductive-bias-in-machine-learning_____no_output_____
| {
"repository": "phucnsp/blog",
"path": "_notebooks/draft/Graph.ipynb",
"matched_keywords": [
"biology"
],
"stars": 2,
"size": 9644,
"hexsha": "d0af1f162a412b3097be3cb4692f653d0d465af9",
"max_line_length": 423,
"avg_line_length": 34.8158844765,
"alphanum_fraction": 0.6408129407
} |
# Notebook from markovmodel/pyemma_tutorials
Path: notebooks/08-common-problems.ipynb
# 08 - Common problems & bad data situations
<a rel="license" href="http://creativecommons.org/licenses/by/4.0/"><img alt="Creative Commons Licence" style="border-width:0" src="https://i.creativecommons.org/l/by/4.0/88x31.png" title='This work is licensed under a Creative Commons Attribution 4.0 International License.' align="right"/></a>
In this notebook, we will revise common problems that might come up when dealing with real-world data.
Maintainers: [@thempel](https://github.com/thempel), [@cwehmeyer](https://github.com/cwehmeyer), [@marscher](https://github.com/marscher), [@psolsson](https://github.com/psolsson)
**Remember**:
- to run the currently highlighted cell, hold <kbd>⇧ Shift</kbd> and press <kbd>⏎ Enter</kbd>;
- to get help for a specific function, place the cursor within the function's brackets, hold <kbd>⇧ Shift</kbd>, and press <kbd>⇥ Tab</kbd>;
- you can find the full documentation at [PyEMMA.org](http://www.pyemma.org).
---
Most problems in Markov modeling of MD data arise from bad sampling combined with a poor discretization.
For estimating a Markov model, it is required to have a connected data set,
i.e., we must have observed each process we want to describe in both directions.
PyEMMA checks if this requirement is fulfilled but, however, in certain situations this might be less obvious._____no_output_____
<code>
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
import mdshare
import pyemma_____no_output_____
</code>
## Case 1: preprocessed, two-dimensional data (toy model)
### well-sampled double-well potential
Let's again have a look at the double-well potential.
Since we are only interested in the problematic situations here,
we will simplify our data a bit and work with a 1D projection._____no_output_____
<code>
file = mdshare.fetch('hmm-doublewell-2d-100k.npz', working_directory='data')
with np.load(file) as fh:
data = [fh['trajectory'][:, 1]]_____no_output_____
</code>
Since this particular example is simple enough, we can define a plotting function that combines histograms with trajectory data:_____no_output_____
<code>
def plot_1D_histogram_trajectories(data, cluster=None, max_traj_length=200, ax=None):
if ax is None:
fig, ax = plt.subplots()
for n, _traj in enumerate(data):
ax.hist(_traj, bins=30, alpha=.33, density=True, color='C{}'.format(n));
ylims = ax.get_ylim()
xlims = ax.get_xlim()
for n, _traj in enumerate(data):
ax.plot(
_traj[:min(len(_traj), max_traj_length)],
np.linspace(*ylims, min(len(_traj), max_traj_length)),
alpha=0.6, color='C{}'.format(n), label='traj {}'.format(n))
if cluster is not None:
ax.plot(
cluster.clustercenters[cluster.dtrajs[n][:min(len(_traj), max_traj_length)], 0],
np.linspace(*ylims, min(len(_traj), max_traj_length)),
'.-', alpha=.6, label='dtraj {}'.format(n), linewidth=.3)
ax.annotate(
'', xy=(0.8500001 * xlims[1], 0.7 * ylims[1]), xytext=(0.85 * xlims[1], 0.3 * ylims[1]),
arrowprops=dict(fc='C0', ec='None', alpha=0.6, width=2))
ax.text(0.86 * xlims[1], 0.5 * ylims[1], '$x(time)$', ha='left', va='center', rotation=90)
ax.set_xlabel('TICA coordinate')
ax.set_ylabel('histogram counts & trajectory time')
ax.legend(loc=2)_____no_output_____
</code>
As a reference, we visualize the histogram of this well-sampled trajectory along with the first $200$ steps (left panel) and the MSM implied timescales (right panel):_____no_output_____
<code>
fig, axes = plt.subplots(1, 2, figsize=(10, 4))
cluster = pyemma.coordinates.cluster_regspace(data, dmin=0.05)
plot_1D_histogram_trajectories(data, cluster=cluster, ax=axes[0])
lags = [i + 1 for i in range(10)]
its = pyemma.msm.its(cluster.dtrajs, lags=lags)
pyemma.plots.plot_implied_timescales(its, marker='o', ax=axes[1], nits=4)
fig.tight_layout()_____no_output_____
</code>
We see a nice, reversibly connected trajectory.
That means we have sampled transitions between the basins in both directions that are correctly resolved by the discretization.
As we see from the almost perfect overlay of discrete and continuous trajectory, nearly no discretization error is made.
### irreversibly connected double-well trajectories
In MD simulations, we often face the problem that a process is sampled only in one direction.
For example, consider protein-protein binding.
The unbinding might take on the order of seconds to minutes and is thus difficult to sample.
We will have a look what happens with the MSM in this case.
Our example are two trajectories sampled from a double-well potential, each started in a different basin.
They will be color coded._____no_output_____
<code>
file = mdshare.fetch('doublewell_oneway.npy', working_directory='data')
data = [trj for trj in np.load(file)]
plot_1D_histogram_trajectories(data, max_traj_length=data[0].shape[0])_____no_output_____
</code>
We note that the orange trajectory does not leave its potential well while the blue trajectory does overcome the barrier exactly once.
⚠️ Even though we have sampled one direction of the process,
we do not sample the way out of one of the potential wells, thus effectively finding a sink state in our data.
Let's have a look at the MSM.
Since in higher dimensions, we often face the problem of poor discretization,
we will simulate this situation by using too few cluster centers._____no_output_____
<code>
cluster_fine = pyemma.coordinates.cluster_regspace(data, dmin=0.1)
cluster_poor = pyemma.coordinates.cluster_regspace(data, dmin=0.7)
print(cluster_fine.n_clusters, cluster_poor.n_clusters)_____no_output_____fig, axes = plt.subplots(2, 2, figsize=(10, 8), sharey='col')
for cluster, ax in zip([cluster_poor, cluster_fine], axes):
plot_1D_histogram_trajectories(data, cluster=cluster, max_traj_length=data[0].shape[0], ax=ax[0])
its = pyemma.msm.its(cluster.dtrajs, lags=[1, 10, 100, 200, 300, 500, 800, 1000])
pyemma.plots.plot_implied_timescales(its, marker='o', ax=ax[1], nits=4)
axes[0, 0].set_title('poor discretization')
axes[1, 0].set_title('fine discretization')
fig.tight_layout()_____no_output_____
</code>
#### What do we see?
1) We observe implied timescales that even look converged in the fine discretization case.
2) With poor clustering, the process cannot be resolved any more, i.e., the ITS does not converge before the lag time exceeds the implied time scale.
The obvious question is, what is the process that can be observed in the fine discretization case?
PyEMMA checks for disconnectivity and thus should not find the process between the two wells.
We follow this question by taking a look at the first eigenvector, which corresponds to that process._____no_output_____
<code>
msm = pyemma.msm.estimate_markov_model(cluster_fine.dtrajs, 200)
fig, ax = plt.subplots()
ax.plot(
cluster_fine.clustercenters[msm.active_set, 0],
msm.eigenvectors_right()[:, 1],
'o:',
label='first eigvec')
tx = ax.twinx()
tx.hist(np.concatenate(data), bins=30, alpha=0.33)
tx.set_yticklabels([])
tx.set_yticks([])
fig.legend()
fig.tight_layout()_____no_output_____
</code>
We observe a process which is entirely taking place in the left potential well.
How come?
PyEMMA estimates MSMs only on the largest connected set because they are only defined on connected sets.
In this particular example, the largest connected set is the microstates in the left potential well.
That means that we find a transition between the right and the left side of this well.
This is not wrong, it might just be non-informative or even irrelevant.
The set of microstates which is used for the MSM estimation is stored in the MSM object `msm` and can be retrieved via `.active_set`._____no_output_____
<code>
print('Active set: {}'.format(msm.active_set))
print('Active state fraction: {:.2}'.format(msm.active_state_fraction))_____no_output_____
</code>
In this example we clearly see that some states are missing.
### disconnected double-well trajectories with cross-overs
This example covers the worst-case scenario.
We have two trajectories that live in two separated wells and never transition to the other one.
Due to a very bad clustering, we believe that the data is connected.
This can happen if we cluster a large dataset in very high dimensions where it is especially difficult to debug. _____no_output_____
<code>
file = mdshare.fetch('doublewell_disconnected.npy', working_directory='data')
data = [trj for trj in np.load(file)]
plot_1D_histogram_trajectories(data, max_traj_length=data[0].shape[0])_____no_output_____
</code>
We, again, compare a reasonable to a deliberately poor discretization:_____no_output_____
<code>
cluster_fine = pyemma.coordinates.cluster_regspace(data, dmin=0.1)
cluster_poor = pyemma.coordinates.cluster_regspace(data, dmin=0.7)
print(cluster_fine.n_clusters, cluster_poor.n_clusters)_____no_output_____fig, axes = plt.subplots(2, 2, figsize=(10, 8), sharey='col')
for cluster, ax in zip([cluster_poor, cluster_fine], axes):
plot_1D_histogram_trajectories(data, cluster=cluster, max_traj_length=data[0].shape[0], ax=ax[0])
its = pyemma.msm.its(cluster.dtrajs, lags=[1, 10, 100, 200, 300, 500, 800, 1000])
pyemma.plots.plot_implied_timescales(its, marker='o', ax=ax[1], nits=4)
axes[0, 0].set_title('poor discretization')
axes[1, 0].set_title('fine discretization')
fig.tight_layout()_____no_output_____
</code>
#### What do we see?
1) With the fine discretization, we observe some timescales that are converged. These are most probably processes within one of the wells, similar to the ones we saw before.
2) The poor discretization induces a large error and describes artificial short visits to the other basin.
3) The timescales in the poor discretization are much higher but not converged.
The high timescales in 3) are in fact caused by the artificial cross-over events created by the poor discretization.
This process was not actually sampled and is an artifact of bad clustering.
Let's look at it in more detail and see what happens if we estimate an MSM and even compute metastable states with PCCA++._____no_output_____
<code>
msm = pyemma.msm.estimate_markov_model(cluster_poor.dtrajs, 200)
nstates = 2
msm.pcca(nstates)
index_order = np.argsort(cluster_poor.clustercenters[:, 0])
fig, axes = plt.subplots(1, 3, figsize=(12, 3))
axes[0].plot(
cluster_poor.clustercenters[index_order, 0],
msm.eigenvectors_right()[index_order, 1],
'o:',
label='1st eigvec')
axes[0].set_title('first eigenvector')
for n, metastable_distribution in enumerate(msm.metastable_distributions):
axes[1].step(
cluster_poor.clustercenters[index_order, 0],
metastable_distribution[index_order],
':',
label='md state {}'.format(n + 1),
where='mid')
axes[1].set_title('metastable distributions (md)')
axes[2].step(
cluster_poor.clustercenters[index_order, 0],
msm.pi[index_order],
'k--',
label='$\pi$',
where='mid')
axes[2].set_title('stationary distribution $\pi$')
for ax in axes:
tx = ax.twinx()
tx.hist(np.concatenate(data), bins=30, alpha=0.33)
tx.set_yticklabels([])
tx.set_yticks([])
fig.legend(loc=7)
fig.tight_layout()_____no_output_____
</code>
We observe that the first eigenvector represents a process that does not exist, i.e., is an artifact.
Nevertheless, the PCCA++ algorithm can separate metastable states in a way we would expect.
It finds the two disconnected states. However, the stationary distribution yields arbitrary results.
#### How to detect disconnectivity?
Generally, hidden Markov models (HMMs) are much more reliable because they come with an additional layer of hidden states.
Cross-over events are thus unlikely to be counted as "real" transitions.
Thus, it is a good idea to estimate an HMM.
What happens if we try to estimate a two state HMM on the same, poorly discretized data?
⚠️ It is important to note that the HMM estimation is initialized from the PCCA++ metastable states that we already analyzed._____no_output_____
<code>
hmm = pyemma.msm.estimate_hidden_markov_model(cluster_poor.dtrajs, nstates, msm.lag)_____no_output_____
</code>
We are getting an error message which already explains what is going wrong, i.e.,
that the (macro-) states are not connected and thus no unique stationary distribution can be estimated.
This is equivalent to having two eigenvalues of magnitude 1 or an implied timescale of infinity which is what we observe in the implied timescales plot._____no_output_____
<code>
its = pyemma.msm.timescales_hmsm(cluster_poor.dtrajs, nstates, lags=[1, 3, 4, 10, 100])
pyemma.plots.plot_implied_timescales(its, marker='o', ylog=True);_____no_output_____
</code>
As we see, the requested timescales above $4$ steps could not be computed because the underlying HMM is disconnected,
i.e., the corresponding timescales are infinity.
The implied timescales that could be computed are most likely the same process that we observed from the fine clustering before, i.e., jumps within one basin.
In general, it is a non-trivial problem to show that processes were not sampled reversibly.
In our experience, HMMs are a good choice here, even though situations can occur where they might not detect the problem as easily as in this example.
<a id="poorly_sampled_dw"></a>
### poorly sampled double-well trajectories
Let's now assume that everything worked out fine but our sampling is somewhat poor.
This is a realistic scenario when dealing with large systems that were well-sampled but still contain only few events of interest.
We expect that our trajectories are just long enough to sample a certain process but are too short to capture them with a large lag time.
To rule out discretization issues and to make the example clear, we use the full data set for discretization._____no_output_____
<code>
file = mdshare.fetch('hmm-doublewell-2d-100k.npz', working_directory='data')
with np.load(file) as fh:
data = [fh['trajectory'][:, 1]]
cluster = pyemma.coordinates.cluster_regspace(data, dmin=0.05)_____no_output_____
</code>
We want to simulate a process that happens on a timescale that is on the order of magnitude of the trajectory length.
To do so, we choose `n_trajs` chunks from the full data set that contain `traj_length` steps by splitting the original trajectory:_____no_output_____
<code>
traj_length = 10
n_trajs = 50
data_short_trajs = list(data[0].reshape((data[0].shape[0] // traj_length, traj_length)))[:n_trajs]
dtrajs_short = list(cluster.dtrajs[0].reshape((data[0].shape[0] // traj_length, traj_length)))[:n_trajs]_____no_output_____
</code>
Now, let's plot the trajectories (left panel) and estimate implied timescales (right panel) as above.
Since we know the true ITS of this process, we visualize it as a dotted line._____no_output_____
<code>
fig, axes = plt.subplots(1, 2, figsize=(10, 4))
for n, _traj in enumerate(data_short_trajs):
axes[0].plot(_traj, np.linspace(0, 1, _traj.shape[0]) + n)
lags = [i + 1 for i in range(9)]
its = pyemma.msm.its(dtrajs_short, lags=lags)
pyemma.plots.plot_implied_timescales(its, marker='o', ax=axes[1], nits=1)
its_reference = pyemma.msm.its(cluster.dtrajs, lags=lags)
pyemma.plots.plot_implied_timescales(its_reference, linestyle=':', ax=axes[1], nits=1)
fig.tight_layout()_____no_output_____
</code>
We note that the slowest process is clearly contained in the data chunks and is reversibly sampled (left panel, short trajectory pieces color coded and stacked).
Due to very short trajectories, we find that this process can only be captured at a very short MSM lag time (right panel).
Above that interval, the slowest timescale diverges.
Luckily, here we know that it is already converged at $\tau = 1$, so we estimate an MSM:_____no_output_____
<code>
msm_short_trajectories = pyemma.msm.estimate_markov_model(dtrajs_short, 1)_____no_output_____
</code>
Let's now have a look at the CK-test:_____no_output_____
<code>
pyemma.plots.plot_cktest(msm_short_trajectories.cktest(2), marker='.');_____no_output_____
</code>
As already discussed, we cannot expect new estimates above a certain lag time to agree with the model prediction due to too short trajectories.
Indeed, we find that new estimates and model predictions diverge at very high lag times.
This does not necessarily mean that the model at $\tau=1$ is wrong and in this particular case,
we can even explain the divergence and find that it fits to the implied timescales divergence.
This example mirrors another incarnation of the sampling problem: Working with large systems,
we often have comparably short trajectories with few rare events.
Thus, implied timescales convergence can often be achieved only in a certain interval and CK-tests will not converge up to arbitrary multiples of the lag time.
It is the responsibility of the modeler to interpret these results and to ensure that a valid model can be obtained from the data.
Please note that this is only a special case of a failed CK test.
More general information about CK tests and what it means if it fails are explained in
[Notebook 03 ➜ 📓](03-msm-estimation-and-validation.ipynb).
## Case 2: low-dimensional molecular dynamics data (alanine dipeptide)
In this example, we will show how an ill-conducted TICA analysis can yield results that look metastable in the 2D histogram,
but in fact are not describing the slow dynamics.
Please note that this was deliberately broken with a nonsensical TICA-lagtime of almost trajectory length, which is 250 ns.
We start off with adding all atom coordinates.
That is a non-optimal choice because it artificially blows up the dimensionality,
but might still be a reasonable choice depending on the problem.
A well-conducted TICA projection can extract the slow coordinates, as we will see at the end of this example._____no_output_____
<code>
pdb = mdshare.fetch('alanine-dipeptide-nowater.pdb', working_directory='data')
files = mdshare.fetch('alanine-dipeptide-*-250ns-nowater.xtc', working_directory='data')
feat = pyemma.coordinates.featurizer(pdb)
feat.add_all()
data = pyemma.coordinates.load(files, features=feat)_____no_output_____
</code>
TICA analysis is conducted with an extremely high lag time of almost $249.9$ ns. We map down to two dimensions._____no_output_____
<code>
tica = pyemma.coordinates.tica(data, lag=data[0].shape[0] - 100, dim=2)
tica_output = tica.get_output()
pyemma.plots.plot_free_energy(*np.concatenate(tica_output).T, legacy=False);_____no_output_____
</code>
In the free energy plot, we recognize two defined basins that are nicely separated by the first TICA component. We thus continue with a discretization of this space and estimate MSM implied timescales._____no_output_____
<code>
cluster = pyemma.coordinates.cluster_kmeans(tica_output, k=200, max_iter=30, stride=100)_____no_output_____its = pyemma.msm.its(cluster.dtrajs, lags=[1, 5, 10, 20, 30, 50])
pyemma.plots.plot_implied_timescales(its, marker='o', units='ps', nits=3);_____no_output_____
</code>
Indeed, we observe a converged implied timescale.
In this example we already know that it is way lower than expected,
but in the general case we are unaware of the real dynamics of the system.
Thus, we estimate an MSM at lag time $20$ ps.
Coarse graining and validation will be done with $2$ metastable states since we found $2$ basins in the free energy landscape and have one slow process in the ITS plot._____no_output_____
<code>
msm = pyemma.msm.estimate_markov_model(cluster.dtrajs, 20)
nstates = 2
msm.pcca(nstates);_____no_output_____stride = 10
metastable_trajs_strided = [msm.metastable_assignments[dtrj[::stride]] for dtrj in cluster.dtrajs]
tica_output_strided = [i[::stride] for i in tica_output]
_, _, misc = pyemma.plots.plot_state_map(*np.concatenate(tica_output_strided).T,
np.concatenate(metastable_trajs_strided));
misc['cbar'].set_ticklabels(range(1, nstates + 1)) # set state numbers 1 ... nstates_____no_output_____
</code>
As we see, the PCCA++ algorithm is perfectly able to separate the two basins.
Let's go on with a Chapman-Kolmogorov validation._____no_output_____
<code>
pyemma.plots.plot_cktest(msm.cktest(nstates), units='ps');_____no_output_____
</code>
Congratulations, we have estimated a well-validated MSM.
The only question remaining is: What does it actually describe?
For this, we usually extract representative structures as described in [Notebook 00 ➜ 📓](00-pentapeptide-showcase.ipynb).
We will not do this here but look at the metastable trajectories instead.
#### What could be wrong about it?
Let's have a look at the trajectories as assigned to PCCA++ metastable states.
We have already computed them before but not looked at their time dependence._____no_output_____
<code>
fig, ax = plt.subplots(1, 1, figsize=(15, 6), sharey=True, sharex=True)
ax_yticks_labels = []
for n, pcca_traj in enumerate(metastable_trajs_strided):
ax.plot(range(len(pcca_traj)), msm.n_metastable * n + pcca_traj, color='k', linewidth=0.3)
ax.scatter(range(len(pcca_traj)), msm.n_metastable * n + pcca_traj, c=pcca_traj, s=0.1)
ax_yticks_labels.append(((msm.n_metastable * (2 * n + 1) - 1) / 2, n + 1))
ax.set_yticks([l[0] for l in ax_yticks_labels])
ax.set_yticklabels([str(l[1]) for l in ax_yticks_labels])
ax.set_ylabel('Trajectory #')
ax.set_xlabel('time / {} ps'.format(stride))
fig.tight_layout()_____no_output_____
</code>
#### What do we see?
The above figure shows the metastable states visited by the trajectory over time.
Each metastable state is color-coded, the trajectory is shown by the black line.
This is clearly not a metastable trajectory as we would have expected.
What did we do wrong?
Let's have a look at the TICA trajectories, not only the histogram!_____no_output_____
<code>
fig, axes = plt.subplots(2, 3, figsize=(12, 6), sharex=True, sharey='row')
for n, trj in enumerate(tica_output):
for dim, traj1d in enumerate(trj.T):
axes[dim, n].plot(traj1d[::stride], linewidth=.5)
for ax in axes[1]:
ax.set_xlabel('time / {} ps'.format(stride))
for dim, ax in enumerate(axes[:, 0]):
ax.set_ylabel('IC {}'.format(dim + 1))
for n, ax in enumerate(axes[0]):
ax.set_title('Trajectory # {}'.format(n + 1))
fig.tight_layout()_____no_output_____
</code>
This is essentially noise, so it is not surprising that the metastable trajectories do not show significant metastability.
The MSM nevertheless found a process in the above TICA components which, however,
does not seem to describe any of the slow dynamics.
Thus, the model is not wrong, it is just not informative.
As we see in this example, it can be instructive to keep the trajectories in mind and not to rely on the histograms alone.
⚠️ Histograms are no proof of metastability,
they can only give us a hint towards defined states in a multi-dimensional state space which can be metastable.
#### How to fix it?
In this particular example, we already know the issue:
the TICA lag time was deliberately chosen way too high.
That's easy to fix.
Let's now have a look at how the metastable trajectories should look for a decent model such as the one estimated in [Notebook 05 ➜ 📓](05-pcca-tpt.ipynb).
We will take the same input data,
do a TICA transform with a realistic lag time of $10$ ps,
and coarse grain into $2$ metastable states in order to compare with the example above._____no_output_____
<code>
tica = pyemma.coordinates.tica(data, lag=10, dim=2)
tica_output = tica.get_output()
cluster = pyemma.coordinates.cluster_kmeans(tica_output, k=200, max_iter=30, stride=100)
pyemma.plots.plot_free_energy(*np.concatenate(tica_output).T, legacy=False);_____no_output_____
</code>
As we see, TICA yields a very nice state separation.
We will see that these states are in fact metastable._____no_output_____
<code>
msm = pyemma.msm.estimate_markov_model(cluster.dtrajs, lag=20)
msm.pcca(nstates);_____no_output_____metastable_trajs_strided = [msm.metastable_assignments[dtrj[::stride]] for dtrj in cluster.dtrajs]_____no_output_____stride = 10
tica_output_strided = [i[::stride] for i in tica_output]
_, _, misc = pyemma.plots.plot_state_map(*np.concatenate(tica_output_strided).T,
np.concatenate(metastable_trajs_strided));
misc['cbar'].set_ticklabels(range(1, nstates + 1)) # set state numbers 1 ... nstates_____no_output_____
</code>
We note that PCCA++ separates the two basins of the free energy plot.
Let's have a look at the metastable trajectories:_____no_output_____
<code>
fig, ax = plt.subplots(1, 1, figsize=(12, 6), sharey=True, sharex=True)
ax_yticks_labels = []
for n, pcca_traj in enumerate(metastable_trajs_strided):
ax.plot(range(len(pcca_traj)), msm.n_metastable * n + pcca_traj, color='k', linewidth=0.3)
ax.scatter(range(len(pcca_traj)), msm.n_metastable * n + pcca_traj, c=pcca_traj, s=0.1)
ax_yticks_labels.append(((msm.n_metastable * (2 * n + 1) - 1) / 2, n + 1))
ax.set_yticks([l[0] for l in ax_yticks_labels])
ax.set_yticklabels([str(l[1]) for l in ax_yticks_labels])
ax.set_ylabel('Trajectory #')
ax.set_xlabel('time / {} ps'.format(stride))
fig.tight_layout()_____no_output_____
</code>
These trajectories show the expected behavior of a metastable trajectory,
i.e., it does not quickly jump back and forth between the states.
## Wrapping up
In this notebook, we have learned about some problems that can arise when estimating MSMs with "real world" data at simple examples.
In detail, we have seen
- irreversibly connected dynamics and what it means for MSM estimation,
- fully disconnected trajectories and how to identify them,
- connected but poorly sampled trajectories and how convergence looks in this case,
- ill-conducted TICA analysis and what it yields.
The most important lesson from this tutorial is that histograms, which are usually calculated in a projected space, are not a sufficient means of identifying metastability or connectedness.
It is crucial to remember that the underlying trajectories play the role of ground truth for the model.
Ultimately, histograms only help us to understand this ground truth but cannot provide a complete picture._____no_output_____
| {
"repository": "markovmodel/pyemma_tutorials",
"path": "notebooks/08-common-problems.ipynb",
"matched_keywords": [
"molecular dynamics"
],
"stars": 49,
"size": 35454,
"hexsha": "d0af280c5bd662018d79480f05713523e4cdcb15",
"max_line_length": 316,
"avg_line_length": 38.2459546926,
"alphanum_fraction": 0.6259096294
} |
# Notebook from antgonza/metagenomics_pooling_notebook
Path: notebooks/amplicon_samplesheet_generator.ipynb
<code>
%reload_ext watermark
%matplotlib inline
from os.path import exists
from metapool.metapool import *
from metapool import (validate_plate_metadata, assign_emp_index, make_sample_sheet, KLSampleSheet, parse_prep, validate_and_scrub_sample_sheet, generate_qiita_prep_file)
%watermark -i -v -iv -m -h -p metapool,sample_sheet,openpyxl -uLast updated: 2021-12-15T17:15:09.841175-06:00
Python implementation: CPython
Python version : 3.9.7
IPython version : 7.30.1
metapool : 0+untagged.112.g8fed443.dirty
sample_sheet: 0.12.0
openpyxl : 3.0.9
Compiler : Clang 10.0.0
OS : Darwin
Release : 20.6.0
Machine : x86_64
Processor : i386
CPU cores : 12
Architecture: 64bit
Hostname: Kelly-Fogelsons-MacBook-Pro.local
seaborn : 0.11.2
matplotlib: 3.5.0
re : 2.2.1
pandas : 1.3.4
numpy : 1.21.2
</code>
# Knight Lab Amplicon Sample Sheet and Mapping (preparation) File Generator
### What is it?
This Jupyter Notebook allows you to automatically generate sample sheets for amplicon sequencing.
### Here's how it should work.
You'll start out with a **basic plate map** (platemap.tsv), which just links each sample to its appropriate row and column.
You can use this google sheet template to generate your plate map:
https://docs.google.com/spreadsheets/d/1xPjB6iR3brGeG4bm2un4ISSsTDxFw5yME09bKqz0XNk/edit?usp=sharing
Next you'll automatically assign EMP barcodes in order to produce a **sample sheet** (samplesheet.csv) that can be used in combination with the rest of the sequence processing pipeline.
**Please designate what kind of amplicon sequencing you want to perform:**_____no_output_____
<code>
seq_type = '16S'
#options are ['16S', '18S', 'ITS']_____no_output_____
</code>
## Step 1: read in plate map
**Enter the correct path to the plate map file**. This will serve as the plate map for relating all subsequent information._____no_output_____
<code>
plate_map_fp = './test_data/amplicon/compressed-map.tsv'
if not exists(plate_map_fp):
print("Error: %s is not a path to a valid file" % plate_map_fp)_____no_output_____
</code>
**Read in the plate map**. It should look something like this:
```
Sample Row Col Blank
GLY_01_012 A 1 False
GLY_14_034 B 1 False
GLY_11_007 C 1 False
GLY_28_018 D 1 False
GLY_25_003 E 1 False
GLY_06_106 F 1 False
GLY_07_011 G 1 False
GLY_18_043 H 1 False
GLY_28_004 I 1 False
```
**Make sure there are no duplicate IDs.** If any two samples share the same name, an error will be thrown and you won't be able to generate a sample sheet._____no_output_____
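If you want to check for duplicates yourself before loading, here is a quick sketch (not part of the original workflow, and assuming the plate map is a tab-separated file with a `Sample` column as in the example above):_____no_output_____
<code>
# optional sanity check (sketch): list any duplicated sample IDs in the plate map
import pandas as pd

_plate = pd.read_csv(plate_map_fp, sep='\t')
_dups = _plate.loc[_plate['Sample'].duplicated(keep=False), 'Sample']
if len(_dups):
    print('Duplicate sample IDs:', sorted(_dups.unique()))_____no_output_____
</code>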
<code>
plate_df = read_plate_map_csv(open(plate_map_fp,'r'))
plate_df.head()_____no_output_____
</code>
# Assign barcodes according to primer plate
This portion of the notebook will assign a barcode to each sample according to the primer plate number.
As inputs, it requires:
1. A plate map dataframe (from previous step)
2. Preparation metadata for the plates, importantly we need the Primer Plate # so we know what **EMP barcodes** to assign to each plate.
The workflow then:
1. Joins the preparation metadata with the plate metadata.
2. Assigns indices per sample_____no_output_____## Enter and validate the plating metadata
- In general you will want to update all the fields, but the most important ones are the `Primer Plate #` and the `Plate Position`. `Primer Plate #` determines which EMP barcodes will be used for this plate. `Plate Position` determines the physical location of the plate.
- If you are plating less than four plates, then remove the metadata for that plate by deleting the text between the curly braces.
- For missing fields, write NA between the single quotes for example `'NA'`.
- To enter a plate copy and paste the contents from the plates below._____no_output_____
<code>
_metadata = [
{
# top left plate
'Plate Position': '1',
'Primer Plate #': '1',
'Sample Plate': 'THDMI_UK_Plate_2',
'Project_Name': 'THDMI UK',
'Plating': 'SF',
'Extraction Kit Lot': '166032128',
'Extraction Robot': 'Carmen_HOWE_KF3',
'TM1000 8 Tool': '109379Z',
'Primer Date': '2021-08-17', # yyyy-mm-dd
'MasterMix Lot': '978215',
'Water Lot': 'RNBJ0628',
'Processing Robot': 'Echo550',
'Original Name': ''
},
{
# top right plate
'Plate Position': '2',
'Primer Plate #': '2',
'Sample Plate': 'THDMI_UK_Plate_3',
'Project_Name': 'THDMI UK',
'Plating':'AS',
'Extraction Kit Lot': '166032128',
'Extraction Robot': 'Carmen_HOWE_KF4',
'TM1000 8 Tool': '109379Z',
'Primer Date': '2021-08-17', # yyyy-mm-dd
'MasterMix Lot': '978215',
'Water Lot': 'RNBJ0628',
'Processing Robot': 'Echo550',
'Original Name': ''
},
{
# bottom left plate
'Plate Position': '3',
'Primer Plate #': '3',
'Sample Plate': 'THDMI_UK_Plate_4',
'Project_Name': 'THDMI UK',
'Plating':'MB_SF',
'Extraction Kit Lot': '166032128',
'Extraction Robot': 'Carmen_HOWE_KF3',
'TM1000 8 Tool': '109379Z',
'Primer Date': '2021-08-17', # yyyy-mm-dd
'MasterMix Lot': '978215',
'Water Lot': 'RNBJ0628',
'Processing Robot': 'Echo550',
'Original Name': ''
},
{
# bottom right plate
'Plate Position': '4',
'Primer Plate #': '4',
'Sample Plate': 'THDMI_US_Plate_6',
'Project_Name': 'THDMI US',
'Plating':'AS',
'Extraction Kit Lot': '166032128',
'Extraction Robot': 'Carmen_HOWE_KF4',
'TM1000 8 Tool': '109379Z',
'Primer Date': '2021-08-17', # yyyy-mm-dd
'MasterMix Lot': '978215',
'Water Lot': 'RNBJ0628',
'Processing Robot': 'Echo550',
'Original Name': ''
},
]
plate_metadata = validate_plate_metadata(_metadata)
plate_metadata_____no_output_____
</code>
The `Plate Position` and `Primer Plate #` allow us to figure out which wells are associated with each of the EMP barcodes._____no_output_____
<code>
if plate_metadata is not None:
plate_df = assign_emp_index(plate_df, plate_metadata, seq_type).reset_index()
plate_df.head()
else:
print('Error: Please fix the errors in the previous cell')_____no_output_____
</code>
As you can see in the table above, the resulting table is now associated with the corresponding EMP barcodes (`Golay Barcode`, `Forward Primer Linker`, etc), and the plating metadata (`Primer Plate #`, `Primer Date`, `Water Lot`, etc)._____no_output_____
<code>
plate_df.head()_____no_output_____
</code>
# Combine plates (optional)
If you would like to combine existing plates with these samples, enter the path to their corresponding sample sheets and mapping (preparation) files below. Otherwise you can skip to the next section.
- sample sheet and mapping (preparation)_____no_output_____
<code>
files = [
# uncomment the line below and point to the correct filepaths to combine with previous plates
# ['test_output/amplicon/2021_08_17_THDMI-4-6_samplesheet.csv', 'test_output/amplicon/2021-08-01-515f806r_prep.tsv'],
]
sheets, preps = [], []
for sheet, prep in files:
sheets.append(KLSampleSheet(sheet))
preps.append(parse_prep(prep))
if len(files):
print('%d pair of files loaded' % len(files))_____no_output_____
</code>
# Make Sample Sheet
This workflow takes the pooled sample information and writes an Illumina sample sheet that can be given directly to the sequencing center or processing pipeline. Note that as of writing `bcl2fastq` does not support error-correction in Golay barcodes so the sample sheet is used to generate a mapping (preparation) file but not to demultiplex sequences. Demultiplexing takes place in [Qiita](https://qiita.ucsd.edu).
As inputs, this notebook requires:
1. A plate map DataFrame (from previous step)
The workflow:
1. formats sample names as bcl2fastq-compatible
2. formats sample data
3. sets values for sample sheet fields and formats sample sheet.
4. writes the sample sheet to a file_____no_output_____## Step 1: Format sample names to be bcl2fastq-compatible
bcl2fastq requires sample names to contain *only* alphanumeric characters, hyphens, and underscores. We'll replace all other characters
with underscores and add the bcl2fastq-compatible names to the DataFrame._____no_output_____
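`bcl_scrub_name` is provided by the notebook's metapool utilities; conceptually it does something like the following simplified sketch (not the package's exact implementation):
```python
import re

def scrub_name_sketch(name):
    """Replace every character that is not alphanumeric, a hyphen,
    or an underscore with an underscore (illustrative only)."""
    return re.sub(r'[^A-Za-z0-9_\-]', '_', str(name))

print(scrub_name_sketch('BLANK2.2A'))   # -> 'BLANK2_2A'
```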
<code>
plate_df['sample sheet Sample_ID'] = plate_df['Sample'].map(bcl_scrub_name)
plate_df.head()_____no_output_____
</code>
## Step 2: Format the sample sheet data
This step formats the data columns appropriately for the sample sheet, using the values we've calculated previously.
The newly-created `bcl2fastq`-compatible names will be in the `Sample_ID` and `Sample_Name` columns. The original sample names will be in the Description column.
Modify `lanes` to indicate which lanes this pool will be sequenced on.
The `Project Name` and `Project Plate` columns will be placed in the `Sample_Project` and `Sample_Plate` columns, respectively.
The `sequencer` value is important for making sure the i5 index is in the correct orientation for demultiplexing. `HiSeq4000`, `HiSeq3000`, `NextSeq`, and `MiniSeq` all require reverse-complemented i5 index sequences; if you enter one of these exact strings for `sequencer`, the i5 sequence will be reverse-complemented for you.
`HiSeq2500`, `MiSeq`, and `NovaSeq` will not have their i5 sequences reverse-complemented._____no_output_____
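Reverse-complementing an i5 barcode is a simple sequence operation; a minimal sketch (not the library's implementation) looks like this:
```python
# Minimal reverse-complement sketch for an i5 index sequence (illustrative only)
COMPLEMENT = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}

def reverse_complement(seq):
    """Return the reverse complement of a DNA barcode sequence."""
    return ''.join(COMPLEMENT[base] for base in reversed(seq.upper()))

print(reverse_complement('AGCCTTCGTCGC'))  # -> 'GCGACGAAGGCT'
```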
<code>
sequencer = 'HiSeq4000'
lanes = [1]
metadata = {
'Bioinformatics': [
{
'Sample_Project': 'THDMI_10317',
'QiitaID': '10317',
'BarcodesAreRC': 'False',
'ForwardAdapter': '',
'ReverseAdapter': '',
'HumanFiltering': 'True',
'library_construction_protocol': 'Illumina EMP protocol 515fbc, 806r amplification of 16S rRNA V4',
'experiment_design_description': 'Equipment',
},
],
'Contact': [
{
'Sample_Project': 'THDMI_10317',
# non-admin contacts who want to know when the sequences
# are available in Qiita
'Email': '[email protected],[email protected]'
},
],
'Chemistry': 'Amplicon',
'Assay': 'TruSeq HT',
}
sheet = make_sample_sheet(metadata, plate_df, sequencer, lanes)
sheet.Settings['Adapter'] = 'AGATCGGAAGAGCACACGTCTGAACTCCAGTCA'
sheet.Settings['AdapterRead2'] = 'AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT'/Users/kfolelso/Documents/metagenomics_pooling_notebook_troubleshooting/metapool/sample_sheet.py:473: UserWarning: The column I5_Index_ID in the sample sheet is empty
warnings.warn('The column %s in the sample sheet is empty' %
/Users/kfolelso/Documents/metagenomics_pooling_notebook_troubleshooting/metapool/sample_sheet.py:473: UserWarning: The column index2 in the sample sheet is empty
warnings.warn('The column %s in the sample sheet is empty' %
</code>
Check for any possible errors in the sample sheet_____no_output_____
<code>
sheet = validate_and_scrub_sample_sheet(sheet)_____no_output_____
</code>
Add the other sample sheets_____no_output_____
<code>
if len(sheets):
sheet.merge(sheets)_____no_output_____
</code>
## Step 3: Write the sample sheet to file_____no_output_____
<code>
# write sample sheet as .csv
sample_sheet_fp = './test_output/amplicon/2021_08_17_THDMI-4-6_samplesheet16S.csv'
if exists(sample_sheet_fp):
print("Warning! This file exists already.")_____no_output_____with open(sample_sheet_fp,'w') as f:
sheet.write(f)
!head -n 30 {sample_sheet_fp}
!echo ...
!tail -n 15 {sample_sheet_fp}[Header],,,,,,,,,,
IEMFileVersion,4,,,,,,,,,
Date,2021-12-15,,,,,,,,,
Workflow,GenerateFASTQ,,,,,,,,,
Application,FASTQ Only,,,,,,,,,
Assay,TruSeq HT,,,,,,,,,
Description,,,,,,,,,,
Chemistry,Amplicon,,,,,,,,,
,,,,,,,,,,
[Reads],,,,,,,,,,
151,,,,,,,,,,
151,,,,,,,,,,
,,,,,,,,,,
[Settings],,,,,,,,,,
ReverseComplement,0,,,,,,,,,
Adapter,AGATCGGAAGAGCACACGTCTGAACTCCAGTCA,,,,,,,,,
AdapterRead2,AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT,,,,,,,,,
,,,,,,,,,,
[Data],,,,,,,,,,
Sample_ID,Sample_Name,Sample_Plate,Sample_Well,I7_Index_ID,index,Sample_Project,Well_description,I5_Index_ID,index2,Lane
X00180471,X00180471,THDMI_10317_PUK2,A1,515rcbc0,AGCCTTCGTCGC,THDMI_10317,X00180471,,,1
X00180199,X00180199,THDMI_10317_PUK2,C1,515rcbc12,CGTATAAATGCG,THDMI_10317,X00180199,,,1
X00179789,X00179789,THDMI_10317_PUK2,E1,515rcbc24,TGACTAATGGCC,THDMI_10317,X00179789,,,1
X00180201,X00180201,THDMI_10317_PUK2,G1,515rcbc36,GTGGAGTCTCAT,THDMI_10317,X00180201,,,1
X00180464,X00180464,THDMI_10317_PUK2,I1,515rcbc48,TGATGTGCTAAG,THDMI_10317,X00180464,,,1
X00179796,X00179796,THDMI_10317_PUK2,K1,515rcbc60,TGTGCACGCCAT,THDMI_10317,X00179796,,,1
X00179888,X00179888,THDMI_10317_PUK2,M1,515rcbc72,GGTGAGCAAGCA,THDMI_10317,X00179888,,,1
X00179969,X00179969,THDMI_10317_PUK2,O1,515rcbc84,CTATGTATTAGT,THDMI_10317,X00179969,,,1
BLANK2_2A,BLANK2.2A,THDMI_10317_PUK2,A3,515rcbc1,TCCATACCGGAA,THDMI_10317,BLANK2.2A,,,1
BLANK2_2B,BLANK2.2B,THDMI_10317_PUK2,C3,515rcbc13,ATGCTGCAACAC,THDMI_10317,BLANK2.2B,,,1
...
X00179670,X00179670,THDMI_10317_PUS6,F24,515rcbc323,GTCAGTATGGCT,THDMI_10317,X00179670,,,1
X00179548,X00179548,THDMI_10317_PUS6,H24,515rcbc335,GTCCTCGCGACT,THDMI_10317,X00179548,,,1
X00179326,X00179326,THDMI_10317_PUS6,J24,515rcbc347,CGTTCGCTAGCC,THDMI_10317,X00179326,,,1
X00179165,X00179165,THDMI_10317_PUS6,L24,515rcbc359,TGCCTGCTCGAC,THDMI_10317,X00179165,,,1
X00179035,X00179035,THDMI_10317_PUS6,N24,515rcbc371,TCTTACCCATAA,THDMI_10317,X00179035,,,1
X00179260,X00179260,THDMI_10317_PUS6,P24,515rcbc383,TGTGCTTGTAGG,THDMI_10317,X00179260,,,1
,,,,,,,,,,
[Bioinformatics],,,,,,,,,,
Sample_Project,QiitaID,BarcodesAreRC,ForwardAdapter,ReverseAdapter,HumanFiltering,library_construction_protocol,experiment_design_description,,,
THDMI_10317,10317,False,,,True,"Illumina EMP protocol 515fbc, 806r amplification of 16S rRNA V4",Equipment,,,
,,,,,,,,,,
[Contact],,,,,,,,,,
Sample_Project,Email,,,,,,,,,
THDMI_10317,"[email protected],[email protected]",,,,,,,,,
,,,,,,,,,,
</code>
# Create a mapping (preparation) file for Qiita_____no_output_____
<code>
output_filename = 'test_output/amplicon/2021-08-01-515f806r_prep.tsv'_____no_output_____qiita_df = generate_qiita_prep_file(plate_df, seq_type)
qiita_df.head()_____no_output_____qiita_df.set_index('sample_name', verify_integrity=True).to_csv(output_filename, sep='\t')_____no_output_____
</code>
Add the previous preparation (mapping) files_____no_output_____
<code>
if len(preps):
prep = prep.append(preps, ignore_index=True)_____no_output_____!head -n 5 {output_filename}sample_name barcode primer primer_plate well_id plating extractionkit_lot extraction_robot tm1000_8_tool primer_date mastermix_lot water_lot processing_robot tm300_8_tool tm50_8_tool sample_plate project_name orig_name well_description experiment_design_description library_construction_protocol linker platform run_center run_date run_prefix pcr_primers sequencing_meth target_gene target_subfragment center_name center_project_name instrument_model runid
X00180471 AGCCTTCGTCGC GTGYCAGCMGCCGCGGTAA 1 A1 SF 166032128 Carmen_HOWE_KF3 109379Z 2021-08-17 978215 RNBJ0628 Echo550 THDMI_UK_Plate_2 THDMI_10317 X00180471 THDMI_UK_Plate_2.X00180471.A1 Illumina EMP protocol 515fbc, 806r amplification of 16S rRNA V4 GT Illumina UCSDMI FWD:GTGYCAGCMGCCGCGGTAA; REV:GGACTACNVGGGTWTCTAAT Sequencing by synthesis 16S rRNA V4 UCSDMI
X00180199 CGTATAAATGCG GTGYCAGCMGCCGCGGTAA 1 C1 SF 166032128 Carmen_HOWE_KF3 109379Z 2021-08-17 978215 RNBJ0628 Echo550 THDMI_UK_Plate_2 THDMI_10317 X00180199 THDMI_UK_Plate_2.X00180199.C1 Illumina EMP protocol 515fbc, 806r amplification of 16S rRNA V4 GT Illumina UCSDMI FWD:GTGYCAGCMGCCGCGGTAA; REV:GGACTACNVGGGTWTCTAAT Sequencing by synthesis 16S rRNA V4 UCSDMI
X00179789 TGACTAATGGCC GTGYCAGCMGCCGCGGTAA 1 E1 SF 166032128 Carmen_HOWE_KF3 109379Z 2021-08-17 978215 RNBJ0628 Echo550 THDMI_UK_Plate_2 THDMI_10317 X00179789 THDMI_UK_Plate_2.X00179789.E1 Illumina EMP protocol 515fbc, 806r amplification of 16S rRNA V4 GT Illumina UCSDMI FWD:GTGYCAGCMGCCGCGGTAA; REV:GGACTACNVGGGTWTCTAAT Sequencing by synthesis 16S rRNA V4 UCSDMI
X00180201 GTGGAGTCTCAT GTGYCAGCMGCCGCGGTAA 1 G1 SF 166032128 Carmen_HOWE_KF3 109379Z 2021-08-17 978215 RNBJ0628 Echo550 THDMI_UK_Plate_2 THDMI_10317 X00180201 THDMI_UK_Plate_2.X00180201.G1 Illumina EMP protocol 515fbc, 806r amplification of 16S rRNA V4 GT Illumina UCSDMI FWD:GTGYCAGCMGCCGCGGTAA; REV:GGACTACNVGGGTWTCTAAT Sequencing by synthesis 16S rRNA V4 UCSDMI
</code>
| {
"repository": "antgonza/metagenomics_pooling_notebook",
"path": "notebooks/amplicon_samplesheet_generator.ipynb",
"matched_keywords": [
"bioinformatics"
],
"stars": 8,
"size": 58112,
"hexsha": "d0af64015702fb600139868cbe54ea9bf530ae1b",
"max_line_length": 502,
"avg_line_length": 37.3951093951,
"alphanum_fraction": 0.4745835628
} |
# Notebook from RSG-Ecuador/Grupo-De-Estudio-Linux-Bash-
Path: Sesion1_Que_es_Linux/deberes/jupyter.ipynb
# Setup for the Study Group
<img src="./img/f_mail.png" style="width: 700px;"/>
## Contents
- Why Jupyter notebooks?
- Bash
- What is a *kernel*?
- Installation
- Homework_____no_output_____## Python and the Jupyter project
<img src="./img/py.jpg" style="width: 500px;"/>
<img src="./img/jp.png" style="width: 100px;"/>
- We need to keep track of each member's progress.
- Python is a high-level, interpreted programming language.
- Jupyter notebooks are easy to use.
- `We need everyone to have a Python installation with jupyter lab`_____no_output_____## How does Jupyter work?
- It is a derivative of the `iPython` project, which offers an interactive interface for programmers.
- It uses the `.ipynb` format.
- It is possible to use programming languages other than Python.
- It lets the user configure how their code is displayed using `Markdown`.
- Now for a demonstration
<img src="./img/jupex.png" style="width: 500px;"/>_____no_output_____
<code>
import matplotlib.pyplot as plt
import numpy as np
import math
# physical constants (SI units)
pi = math.pi; h = 6.626e-34; kB = 1.380e-23; c = 3.0e+8;
Temps = [9940.00, 8500.00, 7500.00, 6627.00, 5810.93, 4231.15, 3000.00, 2973.15, 288.15]
labels = ['Sirius', 'White star', 'Yellow-white star', 'Polaris', 'Sol', 'HfC', 'Bombilla', 'TaN', 'Atmósfera ']
colors = ['r','g','#FF9633','c','m','#eeefff','y','b','k']
# array of frequencies (Hz)
freq = np.arange(0.25e14,3e15,0.25e14)
# spectral energy density (SED) as a function of frequency and temperature
def SED(f, T):
energyDensity = ( 8*pi*h*(np.power(f, 3.0))/(c**3) ) / (np.exp((h/kB)*f/T) - 1)
return energyDensity
# compute and plot the SED for each temperature
for i in range(len(Temps)):
r = SED(freq,Temps[i])
plt.plot(freq*1e-12,r,color=colors[i],label=labels[i])
plt.legend(); plt.xlabel('frequency ( THz )'); plt.ylabel('SED_frequency ( J $m^{-3}$ $Hz^{-1}$ )')
plt.xlim(0.25e2,2.5e3); plt.show()_____no_output_____
</code>
### It allows writing complex mathematical expressions
It is possible to write $\LaTeX$ code if needed_____no_output_____\begin{align}
\frac{\partial u(\lambda, T)}{\partial \lambda} &= \frac{\partial}{\partial \lambda} \left( \frac{C_{1}}{\lambda^{5}}\left(\frac{1}{e^{C_{2}/T\lambda} -1}\right) \right) \\
0 &= \left(\frac{-5}{e^{C_{2}/T\lambda} -1}\frac{1}{\lambda^{6}}\right) + \left( \frac{C_{2}e^{C_{2}/T\lambda}}{T\lambda^{7}} \right)\left(\frac{1}{e^{C_{2}/T\lambda} -1}\right)^{2} \\
0 &= \frac{-\lambda T5}{C_{2}} + \frac{e^{C_{2}/T\lambda}}{e^{C_{2}/T\lambda} -1} \\
0 &= -5 + \left(\frac{C_{2}}{\lambda T}\right) \left(\frac{e^{C_{2}/T\lambda}}{e^{C_{2}/T\lambda} -1}\right)
\end{align}_____no_output_____## How does it use a language other than Python?
- A kernel is a kind of `computational engine` that executes the code inside an `.ipynb` file.
- Kernels exist for several programming languages, such as R, Bash, C++, and julia.
<img src="./img/ker.png" style="width: 250px;"/>
## Why Bash?
- Bash is a scripting language that talks to the shell and has historically helped scientists get along with bioinformatics._____no_output_____## Where do we find the instructions to install Python?
- It can be done in several ways: `Anaconda`, or the `official interpreter` from https://www.python.org/downloads/
- We will use the `Anaconda` interpreter: installation is easier if you are not used to the command line.
- If you are already familiar with Python and do not want to install the `Anaconda` interpreter, you can use `pip` as described at https://pypi.org/project/bash_kernel/
<img src="./img/qrgit.png" style="width: 250px;"/>_____no_output_____## Homework
- We created a folder on `google Drive` where you will upload the `.ipynb` files and an HTML conversion, or another file type depending on the session.
- We will have a quiz every week, which we will send through the study group's Discord server.
- The homework for next week:
    1. Install Ubuntu, if you don't have it yet, using any of the alternatives presented.
    2. Install Anaconda, jupyter lab and the bash kernel.
Submit a Word or PDF document with screenshots proving this.
If you have any problems, please use the `Discord` forums and we will all help each other.
<img src="./img/deberes.png" style="width: 500px;"/>_____no_output_____
| {
"repository": "RSG-Ecuador/Grupo-De-Estudio-Linux-Bash-",
"path": "Sesion1_Que_es_Linux/deberes/jupyter.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 8,
"size": 47442,
"hexsha": "d0afa7f47d7dfedff7a70e9af6f49aca73aaf2d3",
"max_line_length": 41000,
"avg_line_length": 256.4432432432,
"alphanum_fraction": 0.914632604
} |
# Notebook from lankuohsing/Coursera-Deep-Learning-Specialization
Path: Sequence Models/week1/Dinosaurus_Island_Character_level_language_model_final_v3a.ipynb
# Character level language model - Dinosaurus Island
Welcome to Dinosaurus Island! 65 million years ago, dinosaurs existed, and in this assignment they are back. You are in charge of a special task. Leading biology researchers are creating new breeds of dinosaurs and bringing them to life on earth, and your job is to give names to these dinosaurs. If a dinosaur does not like its name, it might go berserk, so choose wisely!
<table>
<td>
<img src="images/dino.jpg" style="width:250;height:300px;">
</td>
</table>
Luckily you have learned some deep learning and you will use it to save the day. Your assistant has collected a list of all the dinosaur names they could find, and compiled them into this [dataset](dinos.txt). (Feel free to take a look by clicking the previous link.) To create new dinosaur names, you will build a character level language model to generate new names. Your algorithm will learn the different name patterns, and randomly generate new names. Hopefully this algorithm will keep you and your team safe from the dinosaurs' wrath!
By completing this assignment you will learn:
- How to store text data for processing using an RNN
- How to synthesize data, by sampling predictions at each time step and passing it to the next RNN-cell unit
- How to build a character-level text generation recurrent neural network
- Why clipping the gradients is important
We will begin by loading in some functions that we have provided for you in `rnn_utils`. Specifically, you have access to functions such as `rnn_forward` and `rnn_backward` which are equivalent to those you've implemented in the previous assignment. _____no_output_____## <font color='darkblue'>Updates</font>
#### If you were working on the notebook before this update...
* The current notebook is version "3a".
* You can find your original work saved in the notebook with the previous version name ("v3")
* To view the file directory, go to the menu "File->Open", and this will open a new tab that shows the file directory.
#### List of updates
* Sort and print `chars` list of characters.
* Import and use pretty print
* `clip`:
- Additional details on why we need to use the "out" parameter.
- Modified for loop to have students fill in the correct items to loop through.
- Added a test case to check for hard-coding error.
* `sample`
- additional hints added to steps 1,2,3,4.
- "Using 2D arrays instead of 1D arrays".
- explanation of numpy.ravel().
- fixed expected output.
- clarified comments in the code.
* "training the model"
- Replaced the sample code with explanations for how to set the index, X and Y (for a better learning experience).
* Spelling, grammar and wording corrections._____no_output_____
<code>
import numpy as np
from utils import *
import random
import pprint_____no_output_____
</code>
## 1 - Problem Statement
### 1.1 - Dataset and Preprocessing
Run the following cell to read the dataset of dinosaur names, create a list of unique characters (such as a-z), and compute the dataset and vocabulary size. _____no_output_____
<code>
data = open('dinos.txt', 'r').read()
data= data.lower()
chars = list(set(data))
data_size, vocab_size = len(data), len(chars)
print('There are %d total characters and %d unique characters in your data.' % (data_size, vocab_size))There are 19909 total characters and 27 unique characters in your data.
</code>
* The characters are a-z (26 characters) plus the "\n" (or newline character).
* In this assignment, the newline character "\n" plays a role similar to the `<EOS>` (or "End of sentence") token we had discussed in lecture.
- Here, "\n" indicates the end of the dinosaur name rather than the end of a sentence.
* `char_to_ix`: In the cell below, we create a python dictionary (i.e., a hash table) to map each character to an index from 0-26.
* `ix_to_char`: We also create a second python dictionary that maps each index back to the corresponding character.
- This will help you figure out what index corresponds to what character in the probability distribution output of the softmax layer. _____no_output_____
<code>
chars = sorted(chars)
print(chars)['\n', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
char_to_ix = { ch:i for i,ch in enumerate(chars) }
ix_to_char = { i:ch for i,ch in enumerate(chars) }
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(ix_to_char){ 0: '\n',
1: 'a',
2: 'b',
3: 'c',
4: 'd',
5: 'e',
6: 'f',
7: 'g',
8: 'h',
9: 'i',
10: 'j',
11: 'k',
12: 'l',
13: 'm',
14: 'n',
15: 'o',
16: 'p',
17: 'q',
18: 'r',
19: 's',
20: 't',
21: 'u',
22: 'v',
23: 'w',
24: 'x',
25: 'y',
26: 'z'}
</code>
### 1.2 - Overview of the model
Your model will have the following structure:
- Initialize parameters
- Run the optimization loop
- Forward propagation to compute the loss function
- Backward propagation to compute the gradients with respect to the loss function
- Clip the gradients to avoid exploding gradients
- Using the gradients, update your parameters with the gradient descent update rule.
- Return the learned parameters
<img src="images/rnn.png" style="width:450;height:300px;">
<caption><center> **Figure 1**: Recurrent Neural Network, similar to what you had built in the previous notebook "Building a Recurrent Neural Network - Step by Step". </center></caption>
* At each time-step, the RNN tries to predict what is the next character given the previous characters.
* The dataset $\mathbf{X} = (x^{\langle 1 \rangle}, x^{\langle 2 \rangle}, ..., x^{\langle T_x \rangle})$ is a list of characters in the training set.
* $\mathbf{Y} = (y^{\langle 1 \rangle}, y^{\langle 2 \rangle}, ..., y^{\langle T_x \rangle})$ is the same list of characters but shifted one character forward.
* At every time-step $t$, $y^{\langle t \rangle} = x^{\langle t+1 \rangle}$. The prediction at time $t$ is the same as the input at time $t + 1$._____no_output_____## 2 - Building blocks of the model
In this part, you will build two important blocks of the overall model:
- Gradient clipping: to avoid exploding gradients
- Sampling: a technique used to generate characters
You will then apply these two functions to build the model._____no_output_____### 2.1 - Clipping the gradients in the optimization loop
In this section you will implement the `clip` function that you will call inside of your optimization loop.
#### Exploding gradients
* When gradients are very large, they're called "exploding gradients."
* Exploding gradients make the training process more difficult, because the updates may be so large that they "overshoot" the optimal values during back propagation.
Recall that your overall loop structure usually consists of:
* forward pass,
* cost computation,
* backward pass,
* parameter update.
Before updating the parameters, you will perform gradient clipping to make sure that your gradients are not "exploding."
#### gradient clipping
In the exercise below, you will implement a function `clip` that takes in a dictionary of gradients and returns a clipped version of gradients if needed.
* There are different ways to clip gradients.
* We will use a simple element-wise clipping procedure, in which every element of the gradient vector is clipped to lie between some range [-N, N].
* For example, if the N=10
- The range is [-10, 10]
- If any component of the gradient vector is greater than 10, it is set to 10.
- If any component of the gradient vector is less than -10, it is set to -10.
- If any components are between -10 and 10, they keep their original values.
<img src="images/clip.png" style="width:400;height:150px;">
<caption><center> **Figure 2**: Visualization of gradient descent with and without gradient clipping, in a case where the network is running into "exploding gradient" problems. </center></caption>
**Exercise**:
Implement the function below to return the clipped gradients of your dictionary `gradients`.
* Your function takes in a maximum threshold and returns the clipped versions of the gradients.
* You can check out [numpy.clip](https://docs.scipy.org/doc/numpy-1.13.0/reference/generated/numpy.clip.html).
- You will need to use the argument "`out = ...`".
- Using the "`out`" parameter allows you to update a variable "in-place".
- If you don't use "`out`" argument, the clipped variable is stored in the variable "gradient" but does not update the gradient variables `dWax`, `dWaa`, `dWya`, `db`, `dby`._____no_output_____
<code>
### GRADED FUNCTION: clip
def clip(gradients, maxValue):
'''
Clips the gradients' values between minimum and maximum.
Arguments:
gradients -- a dictionary containing the gradients "dWaa", "dWax", "dWya", "db", "dby"
maxValue -- everything above this number is set to this number, and everything less than -maxValue is set to -maxValue
Returns:
gradients -- a dictionary with the clipped gradients.
'''
dWaa, dWax, dWya, db, dby = gradients['dWaa'], gradients['dWax'], gradients['dWya'], gradients['db'], gradients['dby']
### START CODE HERE ###
# clip to mitigate exploding gradients, loop over [dWax, dWaa, dWya, db, dby]. (≈2 lines)
for gradient in [dWaa, dWax, dWya, db, dby]:
np.clip(gradient,a_min=-maxValue,a_max=maxValue,out=gradient)
### END CODE HERE ###
gradients = {"dWaa": dWaa, "dWax": dWax, "dWya": dWya, "db": db, "dby": dby}
return gradients_____no_output_____# Test with a maxvalue of 10
maxValue = 10
np.random.seed(3)
dWax = np.random.randn(5,3)*10
dWaa = np.random.randn(5,5)*10
dWya = np.random.randn(2,5)*10
db = np.random.randn(5,1)*10
dby = np.random.randn(2,1)*10
gradients = {"dWax": dWax, "dWaa": dWaa, "dWya": dWya, "db": db, "dby": dby}
gradients = clip(gradients, maxValue)
print("gradients[\"dWaa\"][1][2] =", gradients["dWaa"][1][2])
print("gradients[\"dWax\"][3][1] =", gradients["dWax"][3][1])
print("gradients[\"dWya\"][1][2] =", gradients["dWya"][1][2])
print("gradients[\"db\"][4] =", gradients["db"][4])
print("gradients[\"dby\"][1] =", gradients["dby"][1])gradients["dWaa"][1][2] = 10.0
gradients["dWax"][3][1] = -10.0
gradients["dWya"][1][2] = 0.29713815361
gradients["db"][4] = [ 10.]
gradients["dby"][1] = [ 8.45833407]
</code>
** Expected output:**
```Python
gradients["dWaa"][1][2] = 10.0
gradients["dWax"][3][1] = -10.0
gradients["dWya"][1][2] = 0.29713815361
gradients["db"][4] = [ 10.]
gradients["dby"][1] = [ 8.45833407]
```_____no_output_____
<code>
# Test with a maxValue of 5
maxValue = 5
np.random.seed(3)
dWax = np.random.randn(5,3)*10
dWaa = np.random.randn(5,5)*10
dWya = np.random.randn(2,5)*10
db = np.random.randn(5,1)*10
dby = np.random.randn(2,1)*10
gradients = {"dWax": dWax, "dWaa": dWaa, "dWya": dWya, "db": db, "dby": dby}
gradients = clip(gradients, maxValue)
print("gradients[\"dWaa\"][1][2] =", gradients["dWaa"][1][2])
print("gradients[\"dWax\"][3][1] =", gradients["dWax"][3][1])
print("gradients[\"dWya\"][1][2] =", gradients["dWya"][1][2])
print("gradients[\"db\"][4] =", gradients["db"][4])
print("gradients[\"dby\"][1] =", gradients["dby"][1])gradients["dWaa"][1][2] = 5.0
gradients["dWax"][3][1] = -5.0
gradients["dWya"][1][2] = 0.29713815361
gradients["db"][4] = [ 5.]
gradients["dby"][1] = [ 5.]
</code>
** Expected Output: **
```Python
gradients["dWaa"][1][2] = 5.0
gradients["dWax"][3][1] = -5.0
gradients["dWya"][1][2] = 0.29713815361
gradients["db"][4] = [ 5.]
gradients["dby"][1] = [ 5.]
```_____no_output_____### 2.2 - Sampling
Now assume that your model is trained. You would like to generate new text (characters). The process of generation is explained in the picture below:
<img src="images/dinos3.png" style="width:500;height:300px;">
<caption><center> **Figure 3**: In this picture, we assume the model is already trained. We pass in $x^{\langle 1\rangle} = \vec{0}$ at the first time step, and have the network sample one character at a time. </center></caption>_____no_output_____**Exercise**: Implement the `sample` function below to sample characters. You need to carry out 4 steps:
- **Step 1**: Input the "dummy" vector of zeros $x^{\langle 1 \rangle} = \vec{0}$.
- This is the default input before we've generated any characters.
We also set $a^{\langle 0 \rangle} = \vec{0}$_____no_output_____- **Step 2**: Run one step of forward propagation to get $a^{\langle 1 \rangle}$ and $\hat{y}^{\langle 1 \rangle}$. Here are the equations:
hidden state:
$$ a^{\langle t+1 \rangle} = \tanh(W_{ax} x^{\langle t+1 \rangle } + W_{aa} a^{\langle t \rangle } + b)\tag{1}$$
activation:
$$ z^{\langle t + 1 \rangle } = W_{ya} a^{\langle t + 1 \rangle } + b_y \tag{2}$$
prediction:
$$ \hat{y}^{\langle t+1 \rangle } = softmax(z^{\langle t + 1 \rangle })\tag{3}$$
- Details about $\hat{y}^{\langle t+1 \rangle }$:
- Note that $\hat{y}^{\langle t+1 \rangle }$ is a (softmax) probability vector (its entries are between 0 and 1 and sum to 1).
- $\hat{y}^{\langle t+1 \rangle}_i$ represents the probability that the character indexed by "i" is the next character.
- We have provided a `softmax()` function that you can use._____no_output_____#### Additional Hints
- $x^{\langle 1 \rangle}$ is `x` in the code. When creating the one-hot vector, make a numpy array of zeros, with the number of rows equal to the number of unique characters, and the number of columns equal to one. It's a 2D and not a 1D array.
- $a^{\langle 0 \rangle}$ is `a_prev` in the code. It is a numpy array of zeros, where the number of rows is $n_{a}$, and number of columns is 1. It is a 2D array as well. $n_{a}$ is retrieved by getting the number of columns in $W_{aa}$ (the numbers need to match in order for the matrix multiplication $W_{aa}a^{\langle t \rangle}$ to work.
- [numpy.dot](https://docs.scipy.org/doc/numpy/reference/generated/numpy.dot.html)
- [numpy.tanh](https://docs.scipy.org/doc/numpy/reference/generated/numpy.tanh.html)_____no_output_____#### Using 2D arrays instead of 1D arrays
* You may be wondering why we emphasize that $x^{\langle 1 \rangle}$ and $a^{\langle 0 \rangle}$ are 2D arrays and not 1D vectors.
* For matrix multiplication in numpy, if we multiply a 2D matrix with a 1D vector, we end up with with a 1D array.
* This becomes a problem when we add two arrays where we expected them to have the same shape.
* When two arrays with a different number of dimensions are added together, Python "broadcasts" one across the other.
* Here is some sample code that shows the difference between using a 1D and 2D array._____no_output_____
<code>
import numpy as np_____no_output_____matrix1 = np.array([[1,1],[2,2],[3,3]]) # (3,2)
matrix2 = np.array([[0],[0],[0]]) # (3,1)
vector1D = np.array([1,1]) # (2,)
vector2D = np.array([[1],[1]]) # (2,1)
print("matrix1 \n", matrix1,"\n")
print("matrix2 \n", matrix2,"\n")
print("vector1D \n", vector1D,"\n")
print("vector2D \n", vector2D)matrix1
[[1 1]
[2 2]
[3 3]]
matrix2
[[0]
[0]
[0]]
vector1D
[1 1]
vector2D
[[1]
[1]]
print("Multiply 2D and 1D arrays: result is a 1D array\n",
np.dot(matrix1,vector1D))
print("Multiply 2D and 2D arrays: result is a 2D array\n",
np.dot(matrix1,vector2D))Multiply 2D and 1D arrays: result is a 1D array
[2 4 6]
Multiply 2D and 2D arrays: result is a 2D array
[[2]
[4]
[6]]
print("Adding (3 x 1) vector to a (3 x 1) vector is a (3 x 1) vector\n",
"This is what we want here!\n",
np.dot(matrix1,vector2D) + matrix2)Adding (3 x 1) vector to a (3 x 1) vector is a (3 x 1) vector
This is what we want here!
[[2]
[4]
[6]]
print("Adding a (3,) vector to a (3 x 1) vector\n",
"broadcasts the 1D array across the second dimension\n",
"Not what we want here!\n",
np.dot(matrix1,vector1D) + matrix2
)Adding a (3,) vector to a (3 x 1) vector
broadcasts the 1D array across the second dimension
Not what we want here!
[[2 4 6]
[2 4 6]
[2 4 6]]
</code>
- **Step 3**: Sampling:
- Now that we have $y^{\langle t+1 \rangle}$, we want to select the next letter in the dinosaur name. If we select the most probable, the model will always generate the same result given a starting letter.
- To make the results more interesting, we will use np.random.choice to select a next letter that is likely, but not always the same.
- Sampling is the selection of a value from a group of values, where each value has a probability of being picked.
- Sampling allows us to generate random sequences of values.
- Pick the next character's index according to the probability distribution specified by $\hat{y}^{\langle t+1 \rangle }$.
- This means that if $\hat{y}^{\langle t+1 \rangle }_i = 0.16$, you will pick the index "i" with 16% probability.
- You can use [np.random.choice](https://docs.scipy.org/doc/numpy-1.13.0/reference/generated/numpy.random.choice.html).
Example of how to use `np.random.choice()`:
```python
np.random.seed(0)
probs = np.array([0.1, 0.0, 0.7, 0.2])
idx = np.random.choice([0, 1, 2, 3], p = probs)
```
- This means that you will pick the index (`idx`) according to the distribution:
$P(index = 0) = 0.1, P(index = 1) = 0.0, P(index = 2) = 0.7, P(index = 3) = 0.2$.
- Note that the value that's set to `p` should be set to a 1D vector.
- Also notice that $\hat{y}^{\langle t+1 \rangle}$, which is `y` in the code, is a 2D array._____no_output_____##### Additional Hints
- [range](https://docs.python.org/3/library/functions.html#func-range)
- [numpy.ravel](https://docs.scipy.org/doc/numpy/reference/generated/numpy.ravel.html) takes a multi-dimensional array and returns its contents inside of a 1D vector.
```Python
arr = np.array([[1,2],[3,4]])
print("arr")
print(arr)
print("arr.ravel()")
print(arr.ravel())
```
Output:
```Python
arr
[[1 2]
[3 4]]
arr.ravel()
[1 2 3 4]
```
- Note that `append` is an "in-place" operation. In other words, don't do this:
```Python
fun_hobbies = fun_hobbies.append('learning') ## Doesn't give you what you want
```_____no_output_____- **Step 4**: Update to $x^{\langle t \rangle }$
- The last step to implement in `sample()` is to update the variable `x`, which currently stores $x^{\langle t \rangle }$, with the value of $x^{\langle t + 1 \rangle }$.
- You will represent $x^{\langle t + 1 \rangle }$ by creating a one-hot vector corresponding to the character that you have chosen as your prediction.
- You will then forward propagate $x^{\langle t + 1 \rangle }$ in Step 1 and keep repeating the process until you get a "\n" character, indicating that you have reached the end of the dinosaur name. _____no_output_____##### Additional Hints
- In order to reset `x` before setting it to the new one-hot vector, you'll want to set all the values to zero.
- You can either create a new numpy array: [numpy.zeros](https://docs.scipy.org/doc/numpy/reference/generated/numpy.zeros.html)
- Or fill all values with a single number: [numpy.ndarray.fill](https://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.fill.html)_____no_output_____
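For example, either of the two approaches produces the one-hot vector for a chosen index (shown here outside of `sample()` purely for illustration; `idx = 3` is a hypothetical sampled index):
```python
import numpy as np

vocab_size = 27
idx = 3  # hypothetical sampled index

# Option 1: create a fresh zero vector, then set the chosen entry to 1
x = np.zeros((vocab_size, 1))
x[idx] = 1

# Option 2: reuse the existing array by filling it with zeros first
x.fill(0)
x[idx] = 1
```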
<code>
# GRADED FUNCTION: sample
def sample(parameters, char_to_ix, seed):
"""
Sample a sequence of characters according to a sequence of probability distributions output of the RNN
Arguments:
parameters -- python dictionary containing the parameters Waa, Wax, Wya, by, and b.
char_to_ix -- python dictionary mapping each character to an index.
seed -- used for grading purposes. Do not worry about it.
Returns:
indices -- a list of length n containing the indices of the sampled characters.
"""
# Retrieve parameters and relevant shapes from "parameters" dictionary
Waa, Wax, Wya, by, b = parameters['Waa'], parameters['Wax'], parameters['Wya'], parameters['by'], parameters['b']
vocab_size = by.shape[0]
n_a = Waa.shape[1]
### START CODE HERE ###
# Step 1: Create the a zero vector x that can be used as the one-hot vector
# representing the first character (initializing the sequence generation). (≈1 line)
x = np.zeros((vocab_size,1))
# Step 1': Initialize a_prev as zeros (≈1 line)
a_prev = np.zeros((n_a,1))
# Create an empty list of indices, this is the list which will contain the list of indices of the characters to generate (≈1 line)
indices = []
# idx is the index of the one-hot vector x that is set to 1
# All other positions in x are zero.
# We will initialize idx to -1
idx = -1
# Loop over time-steps t. At each time-step:
# sample a character from a probability distribution
# and append its index (`idx`) to the list "indices".
# We'll stop if we reach 50 characters
# (which should be very unlikely with a well trained model).
# Setting the maximum number of characters helps with debugging and prevents infinite loops.
counter = 0
newline_character = char_to_ix['\n']
while (idx != newline_character and counter != 50):
# Step 2: Forward propagate x using the equations (1), (2) and (3)
a = np.tanh(np.dot(Wax,x)+np.dot(Waa,a_prev)+b)
z = np.dot(Wya,a)+by
y = softmax(z)
# for grading purposes
np.random.seed(counter+seed)
# Step 3: Sample the index of a character within the vocabulary from the probability distribution y
# (see additional hints above)
idx = np.random.choice(list(range(0,vocab_size)),p=y.ravel())
# Append the index to "indices"
indices.append(idx)
# Step 4: Overwrite the input x with one that corresponds to the sampled index `idx`.
# (see additional hints above)
x = np.zeros((vocab_size,1))
x[idx] = 1
# Update "a_prev" to be "a"
a_prev = a
# for grading purposes
seed += 1
counter +=1
### END CODE HERE ###
if (counter == 50):
indices.append(char_to_ix['\n'])
return indices_____no_output_____np.random.seed(2)
_, n_a = 20, 100
Wax, Waa, Wya = np.random.randn(n_a, vocab_size), np.random.randn(n_a, n_a), np.random.randn(vocab_size, n_a)
b, by = np.random.randn(n_a, 1), np.random.randn(vocab_size, 1)
parameters = {"Wax": Wax, "Waa": Waa, "Wya": Wya, "b": b, "by": by}
indices = sample(parameters, char_to_ix, 0)
print("Sampling:")
print("list of sampled indices:\n", indices)
print("size indices:\n", len(indices))
print("list of sampled characters:\n", [ix_to_char[i] for i in indices])Sampling:
list of sampled indices:
[12, 17, 24, 14, 13, 9, 10, 22, 24, 6, 13, 11, 12, 6, 21, 15, 21, 14, 3, 2, 1, 21, 18, 24, 7, 25, 6, 25, 18, 10, 16, 2, 3, 8, 15, 12, 11, 7, 1, 12, 10, 2, 7, 7, 11, 17, 24, 12, 13, 24, 0]
size indices:
51
list of sampled characters:
['l', 'q', 'x', 'n', 'm', 'i', 'j', 'v', 'x', 'f', 'm', 'k', 'l', 'f', 'u', 'o', 'u', 'n', 'c', 'b', 'a', 'u', 'r', 'x', 'g', 'y', 'f', 'y', 'r', 'j', 'p', 'b', 'c', 'h', 'o', 'l', 'k', 'g', 'a', 'l', 'j', 'b', 'g', 'g', 'k', 'q', 'x', 'l', 'm', 'x', '\n']
</code>
** Expected output:**
```Python
Sampling:
list of sampled indices:
[12, 17, 24, 14, 13, 9, 10, 22, 24, 6, 13, 11, 12, 6, 21, 15, 21, 14, 3, 2, 1, 21, 18, 24, 7, 25, 6, 25, 18, 10, 16, 2, 3, 8, 15, 12, 11, 7, 1, 12, 10, 2, 7, 7, 11, 17, 24, 12, 13, 24, 0]
list of sampled characters:
['l', 'q', 'x', 'n', 'm', 'i', 'j', 'v', 'x', 'f', 'm', 'k', 'l', 'f', 'u', 'o', 'u', 'n', 'c', 'b', 'a', 'u', 'r', 'x', 'g', 'y', 'f', 'y', 'r', 'j', 'p', 'b', 'c', 'h', 'o', 'l', 'k', 'g', 'a', 'l', 'j', 'b', 'g', 'g', 'k', 'q', 'x', 'l', 'm', 'x', '\n']
```
* Please note that over time, if there are updates to the back-end of the Coursera platform (that may update the version of numpy), the actual list of sampled indices and sampled characters may change.
* If you follow the instructions given above and get an output without errors, it's possible the routine is correct even if your output doesn't match the expected output. Submit your assignment to the grader to verify its correctness._____no_output_____## 3 - Building the language model
It is time to build the character-level language model for text generation.
### 3.1 - Gradient descent
* In this section you will implement a function performing one step of stochastic gradient descent (with clipped gradients).
* You will go through the training examples one at a time, so the optimization algorithm will be stochastic gradient descent.
As a reminder, here are the steps of a common optimization loop for an RNN:
- Forward propagate through the RNN to compute the loss
- Backward propagate through time to compute the gradients of the loss with respect to the parameters
- Clip the gradients
- Update the parameters using gradient descent
**Exercise**: Implement the optimization process (one step of stochastic gradient descent).
The following functions are provided:
```python
def rnn_forward(X, Y, a_prev, parameters):
""" Performs the forward propagation through the RNN and computes the cross-entropy loss.
It returns the loss' value as well as a "cache" storing values to be used in backpropagation."""
....
return loss, cache
def rnn_backward(X, Y, parameters, cache):
""" Performs the backward propagation through time to compute the gradients of the loss with respect
to the parameters. It returns also all the hidden states."""
...
return gradients, a
def update_parameters(parameters, gradients, learning_rate):
""" Updates parameters using the Gradient Descent Update Rule."""
...
return parameters
```
Recall that you previously implemented the `clip` function:
```Python
def clip(gradients, maxValue)
"""Clips the gradients' values between minimum and maximum."""
...
return gradients
```_____no_output_____#### parameters
* Note that the weights and biases inside the `parameters` dictionary are being updated by the optimization, even though `parameters` is not one of the returned values of the `optimize` function. The `parameters` dictionary is passed by reference into the function, so changes to this dictionary are making changes to the `parameters` dictionary even when accessed outside of the function.
* Python dictionaries and lists are "pass by reference", which means that if you pass a dictionary into a function and modify the dictionary within the function, this changes that same dictionary (it's not a copy of the dictionary)._____no_output_____
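Here is a tiny illustration of that behavior (separate from the graded function):
```python
def update_in_place(d):
    # Modifying the dictionary inside the function...
    d["W"] = d["W"] + 1

params = {"W": 0}
update_in_place(params)
print(params["W"])  # 1 -- ...also changes the dictionary outside the function
```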
<code>
# GRADED FUNCTION: optimize
def optimize(X, Y, a_prev, parameters, learning_rate = 0.01):
"""
Execute one step of the optimization to train the model.
Arguments:
X -- list of integers, where each integer is a number that maps to a character in the vocabulary.
Y -- list of integers, exactly the same as X but shifted one index to the left.
a_prev -- previous hidden state.
parameters -- python dictionary containing:
Wax -- Weight matrix multiplying the input, numpy array of shape (n_a, n_x)
Waa -- Weight matrix multiplying the hidden state, numpy array of shape (n_a, n_a)
Wya -- Weight matrix relating the hidden-state to the output, numpy array of shape (n_y, n_a)
b -- Bias, numpy array of shape (n_a, 1)
by -- Bias relating the hidden-state to the output, numpy array of shape (n_y, 1)
learning_rate -- learning rate for the model.
Returns:
loss -- value of the loss function (cross-entropy)
gradients -- python dictionary containing:
dWax -- Gradients of input-to-hidden weights, of shape (n_a, n_x)
dWaa -- Gradients of hidden-to-hidden weights, of shape (n_a, n_a)
dWya -- Gradients of hidden-to-output weights, of shape (n_y, n_a)
db -- Gradients of bias vector, of shape (n_a, 1)
dby -- Gradients of output bias vector, of shape (n_y, 1)
a[len(X)-1] -- the last hidden state, of shape (n_a, 1)
"""
### START CODE HERE ###
# Forward propagate through time (≈1 line)
loss, cache = rnn_forward(X, Y, a_prev, parameters)
# Backpropagate through time (≈1 line)
gradients, a = rnn_backward(X, Y, parameters, cache)
# Clip your gradients between -5 (min) and 5 (max) (≈1 line)
gradients = clip(gradients, 5)
# Update parameters (≈1 line)
parameters = update_parameters(parameters, gradients, learning_rate)
### END CODE HERE ###
return loss, gradients, a[len(X)-1]_____no_output_____np.random.seed(1)
vocab_size, n_a = 27, 100
a_prev = np.random.randn(n_a, 1)
Wax, Waa, Wya = np.random.randn(n_a, vocab_size), np.random.randn(n_a, n_a), np.random.randn(vocab_size, n_a)
b, by = np.random.randn(n_a, 1), np.random.randn(vocab_size, 1)
parameters = {"Wax": Wax, "Waa": Waa, "Wya": Wya, "b": b, "by": by}
X = [12,3,5,11,22,3]
Y = [4,14,11,22,25, 26]
loss, gradients, a_last = optimize(X, Y, a_prev, parameters, learning_rate = 0.01)
print("Loss =", loss)
print("gradients[\"dWaa\"][1][2] =", gradients["dWaa"][1][2])
print("np.argmax(gradients[\"dWax\"]) =", np.argmax(gradients["dWax"]))
print("gradients[\"dWya\"][1][2] =", gradients["dWya"][1][2])
print("gradients[\"db\"][4] =", gradients["db"][4])
print("gradients[\"dby\"][1] =", gradients["dby"][1])
print("a_last[4] =", a_last[4])Loss = 126.503975722
gradients["dWaa"][1][2] = 0.194709315347
np.argmax(gradients["dWax"]) = 93
gradients["dWya"][1][2] = -0.007773876032
gradients["db"][4] = [-0.06809825]
gradients["dby"][1] = [ 0.01538192]
a_last[4] = [-1.]
</code>
** Expected output:**
```Python
Loss = 126.503975722
gradients["dWaa"][1][2] = 0.194709315347
np.argmax(gradients["dWax"]) = 93
gradients["dWya"][1][2] = -0.007773876032
gradients["db"][4] = [-0.06809825]
gradients["dby"][1] = [ 0.01538192]
a_last[4] = [-1.]
```_____no_output_____### 3.2 - Training the model _____no_output_____* Given the dataset of dinosaur names, we use each line of the dataset (one name) as one training example.
* Every 100 steps of stochastic gradient descent, you will sample 10 randomly chosen names to see how the algorithm is doing.
* Remember to shuffle the dataset, so that stochastic gradient descent visits the examples in random order.
**Exercise**: Follow the instructions and implement `model()`. When `examples[idx]` contains one dinosaur name (a string), you can create an example (X, Y) by following the steps below:
##### Set the index `idx` into the list of examples
* Using the for-loop, walk through the shuffled list of dinosaur names in the list "examples".
* If there are 100 examples, and the for-loop increments the index to 100 onwards, think of how you would make the index cycle back to 0, so that we can continue feeding the examples into the model when j is 100, 101, etc.
* Hint: 101 divided by 100 is 1 with a remainder of 1, so `101 % 100` gives 1.
* `%` is the modulus operator in python.
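For example, with 100 examples, `j % 100` cycles through 0-99 no matter how large `j` gets:
```python
num_examples = 100
for j in [0, 1, 99, 100, 101, 250]:
    idx = j % num_examples
    print(j, "->", idx)   # 0->0, 1->1, 99->99, 100->0, 101->1, 250->50
```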
##### Extract a single example from the list of examples
* `single_example`: use the `idx` index that you set previously to get one word from the list of examples._____no_output_____##### Convert a string into a list of characters: `single_example_chars`
* `single_example_chars`: A string is a list of characters.
* You can use a list comprehension (recommended over for-loops) to generate a list of characters.
```Python
str = 'I love learning'
list_of_chars = [c for c in str]
print(list_of_chars)
```
```
['I', ' ', 'l', 'o', 'v', 'e', ' ', 'l', 'e', 'a', 'r', 'n', 'i', 'n', 'g']
```_____no_output_____##### Convert list of characters to a list of integers: `single_example_ix`
* Create a list that contains the index numbers associated with each character.
* Use the dictionary `char_to_ix`
* You can combine this with the list comprehension that is used to get a list of characters from a string.
* This is a separate line of code below, to help learners clarify each step in the function._____no_output_____##### Create the list of input characters: `X`
* `rnn_forward` uses the `None` value as a flag to set the input vector as a zero-vector.
* Prepend the `None` value in front of the list of input characters.
* There is more than one way to prepend a value to a list. One way is to add two lists together: `['a'] + ['b']`_____no_output_____##### Get the integer representation of the newline character `ix_newline`
* `ix_newline`: The newline character signals the end of the dinosaur name.
- get the integer representation of the newline character `'\n'`.
- Use `char_to_ix`_____no_output_____##### Set the list of labels (integer representation of the characters): `Y`
* The goal is to train the RNN to predict the next letter in the name, so the labels are the list of characters that are one time step ahead of the characters in the input `X`.
- For example, `Y[0]` contains the same value as `X[1]`
* The RNN should predict a newline at the last letter so add ix_newline to the end of the labels.
- Append the integer representation of the newline character to the end of `Y`.
- Note that `append` is an in-place operation.
- It might be easier for you to add two lists together._____no_output_____
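Putting these steps together for a single hypothetical training example (the name "rex"), using the `char_to_ix` mapping defined earlier:
```python
single_example = "rex"                                         # hypothetical dinosaur name
single_example_ix = [char_to_ix[c] for c in single_example]    # [18, 5, 24]
X = [None] + single_example_ix                                 # [None, 18, 5, 24]
ix_newline = char_to_ix["\n"]                                  # 0
Y = X[1:] + [ix_newline]                                       # [18, 5, 24, 0]
```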
<code>
# GRADED FUNCTION: model
def model(data, ix_to_char, char_to_ix, num_iterations = 35000, n_a = 50, dino_names = 7, vocab_size = 27):
"""
Trains the model and generates dinosaur names.
Arguments:
data -- text corpus
ix_to_char -- dictionary that maps the index to a character
char_to_ix -- dictionary that maps a character to an index
num_iterations -- number of iterations to train the model for
n_a -- number of units of the RNN cell
dino_names -- number of dinosaur names you want to sample at each iteration.
vocab_size -- number of unique characters found in the text (size of the vocabulary)
Returns:
parameters -- learned parameters
"""
# Retrieve n_x and n_y from vocab_size
n_x, n_y = vocab_size, vocab_size
# Initialize parameters
parameters = initialize_parameters(n_a, n_x, n_y)
# Initialize loss (this is required because we want to smooth our loss)
loss = get_initial_loss(vocab_size, dino_names)
# Build list of all dinosaur names (training examples).
with open("dinos.txt") as f:
examples = f.readlines()
examples = [x.lower().strip() for x in examples]
# Shuffle list of all dinosaur names
np.random.seed(0)
np.random.shuffle(examples)
# Initialize the hidden state of your LSTM
a_prev = np.zeros((n_a, 1))
# Optimization loop
for j in range(num_iterations):
### START CODE HERE ###
# Set the index `idx` (see instructions above)
idx = j%len(examples)
# Set the input X (see instructions above)
single_example = examples[idx]
single_example_chars = [c for c in single_example]
single_example_ix = [char_to_ix[c] for c in single_example_chars]
X = [None]+single_example_ix
# Set the labels Y (see instructions above)
ix_newline = char_to_ix["\n"]
Y = X[1:]+[ix_newline]
# Perform one optimization step: Forward-prop -> Backward-prop -> Clip -> Update parameters
# Choose a learning rate of 0.01
curr_loss, gradients, a_prev = optimize(X, Y, a_prev, parameters, learning_rate = 0.01)
### END CODE HERE ###
# Use a latency trick to keep the loss smooth. It happens here to accelerate the training.
loss = smooth(loss, curr_loss)
# Every 2000 Iteration, generate "n" characters thanks to sample() to check if the model is learning properly
if j % 2000 == 0:
print('Iteration: %d, Loss: %f' % (j, loss) + '\n')
# The number of dinosaur names to print
seed = 0
for name in range(dino_names):
# Sample indices and print them
sampled_indices = sample(parameters, char_to_ix, seed)
print_sample(sampled_indices, ix_to_char)
seed += 1 # To get the same result (for grading purposes), increment the seed by one.
print('\n')
return parameters_____no_output_____
</code>
Run the following cell, you should observe your model outputting random-looking characters at the first iteration. After a few thousand iterations, your model should learn to generate reasonable-looking names. _____no_output_____
<code>
parameters = model(data, ix_to_char, char_to_ix)Iteration: 0, Loss: 23.087336
Nkzxwtdmfqoeyhsqwasjkjvu
Kneb
Kzxwtdmfqoeyhsqwasjkjvu
Neb
Zxwtdmfqoeyhsqwasjkjvu
Eb
Xwtdmfqoeyhsqwasjkjvu
Iteration: 2000, Loss: 27.884160
Liusskeomnolxeros
Hmdaairus
Hytroligoraurus
Lecalosapaus
Xusicikoraurus
Abalpsamantisaurus
Tpraneronxeros
Iteration: 4000, Loss: 25.901815
Mivrosaurus
Inee
Ivtroplisaurus
Mbaaisaurus
Wusichisaurus
Cabaselachus
Toraperlethosdarenitochusthiamamumamaon
Iteration: 6000, Loss: 24.608779
Onwusceomosaurus
Lieeaerosaurus
Lxussaurus
Oma
Xusteonosaurus
Eeahosaurus
Toreonosaurus
Iteration: 8000, Loss: 24.070350
Onxusichepriuon
Kilabersaurus
Lutrodon
Omaaerosaurus
Xutrcheps
Edaksoje
Trodiktonus
Iteration: 10000, Loss: 23.844446
Onyusaurus
Klecalosaurus
Lustodon
Ola
Xusodonia
Eeaeosaurus
Troceosaurus
Iteration: 12000, Loss: 23.291971
Onyxosaurus
Kica
Lustrepiosaurus
Olaagrraiansaurus
Yuspangosaurus
Eealosaurus
Trognesaurus
Iteration: 14000, Loss: 23.382338
Meutromodromurus
Inda
Iutroinatorsaurus
Maca
Yusteratoptititan
Ca
Troclosaurus
Iteration: 16000, Loss: 23.255630
Meustolkanolus
Indabestacarospceryradwalosaurus
Justolopinaveraterasauracoptelalenyden
Maca
Yusocles
Daahosaurus
Trodon
Iteration: 18000, Loss: 22.905483
Phytronn
Meicanstolanthus
Mustrisaurus
Pegalosaurus
Yuskercis
Egalosaurus
Tromelosaurus
Iteration: 20000, Loss: 22.873854
Nlyushanerohyisaurus
Loga
Lustrhigosaurus
Nedalosaurus
Yuslangosaurus
Elagosaurus
Trrangosaurus
Iteration: 22000, Loss: 22.710545
Onyxromicoraurospareiosatrus
Liga
Mustoffankeugoptardoros
Ola
Yusodogongterosaurus
Ehaerona
Trododongxernochenhus
Iteration: 24000, Loss: 22.604827
Meustognathiterhucoplithaloptha
Jigaadosaurus
Kurrodon
Mecaistheansaurus
Yuromelosaurus
Eiaeropeeton
Troenathiteritaus
Iteration: 26000, Loss: 22.714486
Nhyxosaurus
Kola
Lvrosaurus
Necalosaurus
Yurolonlus
Ejakosaurus
Troindronykus
Iteration: 28000, Loss: 22.647640
Onyxosaurus
Loceahosaurus
Lustleonlonx
Olabasicachudrakhurgawamosaurus
Ytrojianiisaurus
Eladon
Tromacimathoshargicitan
Iteration: 30000, Loss: 22.598485
Oryuton
Locaaesaurus
Lustoendosaurus
Olaahus
Yusaurus
Ehadopldarshuellus
Troia
Iteration: 32000, Loss: 22.211861
Meutronlapsaurus
Kracallthcaps
Lustrathus
Macairugeanosaurus
Yusidoneraverataus
Eialosaurus
Troimaniathonsaurus
Iteration: 34000, Loss: 22.447230
Onyxipaledisons
Kiabaeropa
Lussiamang
Pacaeptabalsaurus
Xosalong
Eiacoteg
Troia
</code>
** Expected Output**
The output of your model may look different, but it will look something like this:
```Python
Iteration: 34000, Loss: 22.447230
Onyxipaledisons
Kiabaeropa
Lussiamang
Pacaeptabalsaurus
Xosalong
Eiacoteg
Troia
```_____no_output_____## Conclusion
You can see that your algorithm has started to generate plausible dinosaur names towards the end of the training. At first, it was generating random characters, but towards the end you could see dinosaur names with cool endings. Feel free to run the algorithm even longer and play with hyperparameters to see if you can get even better results. Our implementation generated some really cool names like `maconucon`, `marloralus` and `macingsersaurus`. Your model hopefully also learned that dinosaur names tend to end in `saurus`, `don`, `aura`, `tor`, etc.
If your model generates some non-cool names, don't blame the model entirely--not all actual dinosaur names sound cool. (For example, `dromaeosauroides` is an actual dinosaur name and is in the training set.) But this model should give you a set of candidates from which you can pick the coolest!
This assignment used a relatively small dataset, so that you could train an RNN quickly on a CPU. Training a model of the English language requires a much bigger dataset, usually needs much more computation, and could run for many hours on GPUs. We ran our dinosaur name generator for quite some time, and so far our favorite name is the great, undefeatable, and fierce: Mangosaurus!
<img src="images/mangosaurus.jpeg" style="width:250;height:300px;">_____no_output_____## 4 - Writing like Shakespeare
The rest of this notebook is optional and is not graded, but we hope you'll do it anyway since it's quite fun and informative.
A similar (but more complicated) task is to generate Shakespeare poems. Instead of learning from a dataset of Dinosaur names you can use a collection of Shakespearian poems. Using LSTM cells, you can learn longer term dependencies that span many characters in the text--e.g., where a character appearing somewhere a sequence can influence what should be a different character much much later in the sequence. These long term dependencies were less important with dinosaur names, since the names were quite short.
<img src="images/shakespeare.jpg" style="width:500;height:400px;">
<caption><center> Let's become poets! </center></caption>
We have implemented a Shakespeare poem generator with Keras. Run the following cell to load the required packages and models. This may take a few minutes. _____no_output_____
<code>
from __future__ import print_function
from keras.callbacks import LambdaCallback
from keras.models import Model, load_model, Sequential
from keras.layers import Dense, Activation, Dropout, Input, Masking
from keras.layers import LSTM
from keras.utils.data_utils import get_file
from keras.preprocessing.sequence import pad_sequences
from shakespeare_utils import *
import sys
import ioUsing TensorFlow backend.
</code>
To save you some time, we have already trained a model for ~1000 epochs on a collection of Shakespearian poems called [*"The Sonnets"*](shakespeare.txt). _____no_output_____Let's train the model for one more epoch. When it finishes training for an epoch---this will also take a few minutes---you can run `generate_output`, which will prompt asking you for an input (`<`40 characters). The poem will start with your sentence, and our RNN-Shakespeare will complete the rest of the poem for you! For example, try "Forsooth this maketh no sense " (don't enter the quotation marks). Depending on whether you include the space at the end, your results might also differ--try it both ways, and try other inputs as well.
_____no_output_____
<code>
print_callback = LambdaCallback(on_epoch_end=on_epoch_end)
model.fit(x, y, batch_size=128, epochs=1, callbacks=[print_callback])Epoch 1/1
31412/31412 [==============================] - 263s - loss: 2.5635
# Run this cell to try with different inputs without having to re-train the model
generate_output()Write the beginning of your poem, the Shakespeare machine will complete it. Your input is: Forsooth this maketh no sense
Here is your poem:
Forsooth this maketh no sense renping.
but a did sind make hil ons dede is men,
withou's inus will o decanot,
lek o whle thou debert should every forted,
whice muse whe bow way i hath, wom's (leccanny,
that minge in adited and forso caned doass,
with this of ares mote di have no lade peres,
in live mater for worre la cinse with will when thy comserd srobld,
now, boroor, my ho thate thought dewhice hevinging,
now not his wrecio
</code>
The RNN-Shakespeare model is very similar to the one you have built for dinosaur names. The only major differences are:
- LSTMs instead of the basic RNN to capture longer-range dependencies
- The model is a deeper, stacked LSTM model (2 layer)
- Using Keras instead of raw Python/NumPy to simplify the code
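For concreteness, a rough sketch of what a 2-layer stacked character-level LSTM looks like in Keras is shown below. The layer sizes, sequence length `Tx`, and vocabulary size are illustrative assumptions; the actual model loaded from `shakespeare_utils` may differ.
<code>
from keras.models import Sequential
from keras.layers import LSTM, Dense, Activation

# Illustrative sketch only: a deeper, stacked character-level LSTM.
Tx, vocab_size = 40, 38   # assumed sequence length and character-set size
sketch_model = Sequential()
sketch_model.add(LSTM(128, return_sequences=True, input_shape=(Tx, vocab_size)))  # layer 1 passes full sequences forward
sketch_model.add(LSTM(128))                                                       # layer 2 returns only the last hidden state
sketch_model.add(Dense(vocab_size))
sketch_model.add(Activation('softmax'))                                           # distribution over the next character
sketch_model.compile(loss='categorical_crossentropy', optimizer='adam')
</code>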
If you want to learn more, you can also check out the Keras Team's text generation implementation on GitHub: https://github.com/keras-team/keras/blob/master/examples/lstm_text_generation.py.
Congratulations on finishing this notebook! _____no_output_____**References**:
- This exercise took inspiration from Andrej Karpathy's implementation: https://gist.github.com/karpathy/d4dee566867f8291f086. To learn more about text generation, also check out Karpathy's [blog post](http://karpathy.github.io/2015/05/21/rnn-effectiveness/).
- For the Shakespearian poem generator, our implementation was based on the implementation of an LSTM text generator by the Keras team: https://github.com/keras-team/keras/blob/master/examples/lstm_text_generation.py _____no_output_____
| {
"repository": "lankuohsing/Coursera-Deep-Learning-Specialization",
"path": "Sequence Models/week1/Dinosaurus_Island_Character_level_language_model_final_v3a.ipynb",
"matched_keywords": [
"biology"
],
"stars": null,
"size": 62618,
"hexsha": "d0b0f56073f83749a055b1b2deb60f1d44d1a931",
"max_line_length": 565,
"avg_line_length": 38.3923973023,
"alphanum_fraction": 0.5588329234
} |
# Notebook from biosignalsnotebooks/biosignalsnotebooks
Path: notebookToHtml/biosignalsnotebooks_html_publish/Categories/Install/prepare_anaconda.ipynb
<table width="100%">
<tr style="border-bottom:solid 2pt #009EE3">
<td style="text-align:left" width="10%">
<a href="prepare_anaconda.dwipynb" download><img src="../../images/icons/download.png"></a>
</td>
<td style="text-align:left" width="10%">
<a href="https://mybinder.org/v2/gh/biosignalsnotebooks/biosignalsnotebooks/biosignalsnotebooks_binder?filepath=biosignalsnotebooks_environment%2Fcategories%2FInstall%2Fprepare_anaconda.dwipynb" target="_blank"><img src="../../images/icons/program.png" title="Be creative and test your solutions !"></a>
</td>
<td></td>
<td style="text-align:left" width="5%">
<a href="../MainFiles/biosignalsnotebooks.ipynb"><img src="../../images/icons/home.png"></a>
</td>
<td style="text-align:left" width="5%">
<a href="../MainFiles/contacts.ipynb"><img src="../../images/icons/contacts.png"></a>
</td>
<td style="text-align:left" width="5%">
<a href="https://github.com/biosignalsnotebooks/biosignalsnotebooks" target="_blank"><img src="../../images/icons/github.png"></a>
</td>
<td style="border-left:solid 2pt #009EE3" width="15%">
<img src="../../images/ost_logo.png">
</td>
</tr>
</table>_____no_output_____<link rel="stylesheet" href="../../styles/theme_style.css">
<!--link rel="stylesheet" href="../../styles/header_style.css"-->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
<table width="100%">
<tr>
<td id="image_td" width="15%" class="header_image_color_13"><div id="image_img"
class="header_image_13"></div></td>
<td class="header_text"> Download, Install and Execute Anaconda </td>
</tr>
</table>_____no_output_____<div id="flex-container">
<div id="diff_level" class="flex-item">
<strong>Difficulty Level:</strong> <span class="fa fa-star checked"></span>
<span class="fa fa-star"></span>
<span class="fa fa-star"></span>
<span class="fa fa-star"></span>
<span class="fa fa-star"></span>
</div>
<div id="tag" class="flex-item-tag">
<span id="tag_list">
<table id="tag_list_table">
<tr>
<td class="shield_left">Tags</td>
<td class="shield_right" id="tags">install☁jupyter☁notebook☁anaconda☁download</td>
</tr>
</table>
</span>
<!-- [OR] Visit https://img.shields.io in order to create a tag badge-->
</div>
</div>_____no_output_____In every journey we always need to prepare our toolbox with the needed resources !
With <strong><span class="color1">biosignalsnotebooks</span></strong> happens the same, being <strong><span class="color4">Jupyter Notebook</span></strong> environment the most relevant application (that supports <strong><span class="color1">biosignalsnotebooks</span></strong>) to take the maximum advantage during your learning process.
In the following sequence of instruction it will be presented the operations that should be completed in order to have <strong><span class="color4">Jupyter Notebook</span></strong> ready to use and to open our <strong>ipynb</strong> files on local server.
<table width="100%">
<tr>
<td style="text-align:left;font-size:12pt;border-top:dotted 2px #62C3EE">
<span class="color1">☌</span> The current <span class="color4"><strong>Jupyter Notebook</strong></span> is focused on a complete Python toolbox called <a href="https://www.anaconda.com/distribution/"><span class="color4"><strong>Anaconda <img src="../../images/icons/link.png" width="10px" height="10px" style="display:inline"></strong></span></a>.
However, there is an alternative approach to get everything ready for starting our journey, which is described in <a href="../Install/prepare_jupyter.ipynb"><span class="color1"><strong>"Download, Install and Execute Jupyter Notebook Environment" <img src="../../images/icons/link.png" width="10px" height="10px" style="display:inline"></strong></span></a>
</td>
</tr>
</table>
<hr>_____no_output_____<hr>_____no_output_____<p class="steps">1 - Access the <strong><span class="color4">Anaconda</span></strong> official page at <a href="https://www.anaconda.com/distribution/">https://www.anaconda.com/distribution/</a></p>_____no_output_____<img src="../../images/other/anaconda_page.png">_____no_output_____<p class="steps">2 - Click on "Download" button, giving a first but strong step into our final objective</p>_____no_output_____<img src="../../images/other/anaconda_download.gif">_____no_output_____<p class="steps">3 - Specify the operating system of your local machine</p>_____no_output_____<img src="../../images/other/anaconda_download_os.gif">_____no_output_____<p class="steps">4 - Select the version of <span class="color1">Python</span> compiler to be included on <span class="color4">Anaconda</span></p>
It is strongly advisable that you choose version <strong>3.x</strong> to ensure that all functionalities of packages like <strong><span class="color1">biosignalsnotebooks</span></strong> are fully operational._____no_output_____<img src="../../images/other/anaconda_download_version.gif">_____no_output_____<p class="steps">5 - After defining the directory where the downloaded file will be stored, please wait a few minutes for the transfer to finish</p>
<span class="color13" style="font-size:30px">⚠</span>
The waiting time will depend on the quality of the Internet connection!_____no_output_____<p class="steps">6 - When the download is finished, navigate through your directory tree until reaching the folder where the downloaded file is located</p>
In our case the destination folder was <img src="../../images/other/anaconda_download_location.png" style="display:inline;margin-top:0px">_____no_output_____<p class="steps">7 - Execute <span class="color4">Anaconda</span> installer file with a double-click</p>_____no_output_____<img src="../../images/other/anaconda_download_installer.gif">_____no_output_____<p class="steps">8 - Follow the sequential instructions presented on the <span class="color4">Anaconda</span> installer</p>_____no_output_____<img src="../../images/other/anaconda_download_install_steps.gif">_____no_output_____<p class="steps">9 - <span class="color4">Jupyter Notebook</span> environment is included on the previous installation. For starting your first Notebook execute <span class="color4">Jupyter Notebook</span></p>
Launch from "Anaconda Navigator" or through a command window, like described on the following steps.
<p class="steps">9.1 - For executing <span class="color4">Jupyter Notebook</span> environment you should open a <strong>console</strong> (in your operating system).</p>
<i>If you are a Microsoft Windows native, just click on the Windows logo (bottom-left corner of the screen) and type "cmd". Then press "Enter".</i>_____no_output_____<p class="steps">9.2 - Type <strong>"jupyter notebook"</strong> inside the opened console. A local <span class="color4"><strong>Jupyter Notebook</strong></span> server will be launched.</p>_____no_output_____<img src="../../images/other/open_jupyter.gif">_____no_output_____<p class="steps">10 - Create a blank Notebook</p>
<p class="steps">10.1 - Now, you should navigate through your directories until reaching the folder where you want to create or open a Notebook (as demonstrated in the following video)</p>
<span class="color13" style="font-size:30px">⚠</span>
<p style="margin-top:0px">You should note that your folder hierarchy is unique, so, the steps followed in the next image, will depend on your folder organisation, being merely illustrative </p>_____no_output_____<img src="../../images/other/create_notebook_part1.gif">_____no_output_____<p class="steps">10.2 - For creating a new Notebook, "New" button (top-right zone of Jupyter Notebook interface) should be pressed and <span class="color1"><strong>Python 3</strong></span> option selected.</p>
<i>A blank Notebook will arise and now you just need to be creative and expand your thoughts to others persons!!!</i>_____no_output_____<img src="../../images/other/create_notebook_part2.gif">_____no_output_____This can be the start of something great. Now you have all the software conditions to create and develop interactive tutorials, combining Python with HTML !
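As a quick sanity check (this cell is not part of the original guide, just a minimal sketch), you can run the following in your new blank Notebook to confirm that the kernel is the Python interpreter shipped with Anaconda:
<code>
import sys
import platform

# Print the Python version and the interpreter path; the path should point inside
# your Anaconda installation directory if the kernel is set up correctly.
print(platform.python_version())
print(sys.executable)
</code>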
<span class="color4"><strong>Anaconda</strong></span> contains lots of additional functionalities, namely <a href="https://anaconda.org/anaconda/spyder"><span class="color7"><strong>Spyder <img src="../../images/icons/link.png" width="10px" height="10px" style="display:inline"></strong></span></a>, which is an intuitive Python editor for creating and testing your own scripts.
<strong><span class="color7">We hope that you have enjoyed this guide. </span><span class="color2">biosignalsnotebooks</span><span class="color4"> is an environment in continuous expansion, so don't stop your journey and learn more with the remaining <a href="../MainFiles/biosignalsnotebooks.ipynb">Notebooks <img src="../../images/icons/link.png" width="10px" height="10px" style="display:inline"></a></span></strong> ! _____no_output_____<hr>
<table width="100%">
<tr>
<td style="border-right:solid 3px #009EE3" width="20%">
<img src="../../images/ost_logo.png">
</td>
<td width="40%" style="text-align:left">
<a href="../MainFiles/aux_files/biosignalsnotebooks_presentation.pdf" target="_blank">☌ Project Presentation</a>
<br>
<a href="https://github.com/biosignalsnotebooks/biosignalsnotebooks" target="_blank">☌ GitHub Repository</a>
<br>
<a href="https://pypi.org/project/biosignalsnotebooks/" target="_blank">☌ How to install biosignalsnotebooks Python package ?</a>
<br>
<a href="../MainFiles/signal_samples.ipynb">☌ Signal Library</a>
</td>
<td width="40%" style="text-align:left">
<a href="../MainFiles/biosignalsnotebooks.ipynb">☌ Notebook Categories</a>
<br>
<a href="../MainFiles/by_diff.ipynb">☌ Notebooks by Difficulty</a>
<br>
<a href="../MainFiles/by_signal_type.ipynb">☌ Notebooks by Signal Type</a>
<br>
<a href="../MainFiles/by_tag.ipynb">☌ Notebooks by Tag</a>
</td>
</tr>
</table>_____no_output_____
<code>
from biosignalsnotebooks.__notebook_support__ import css_style_apply
css_style_apply()_____no_output_____%%html
<script>
// AUTORUN ALL CELLS ON NOTEBOOK-LOAD!
require(
['base/js/namespace', 'jquery'],
function(jupyter, $) {
$(jupyter.events).on("kernel_ready.Kernel", function () {
console.log("Auto-running all cells-below...");
jupyter.actions.call('jupyter-notebook:run-all-cells-below');
jupyter.actions.call('jupyter-notebook:save-notebook');
});
}
);
</script>_____no_output_____
</code>
| {
"repository": "biosignalsnotebooks/biosignalsnotebooks",
"path": "notebookToHtml/biosignalsnotebooks_html_publish/Categories/Install/prepare_anaconda.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 7,
"size": 79309,
"hexsha": "d0b4f29894b39852f4d005979945e8e6f67b24f2",
"max_line_length": 5029,
"avg_line_length": 43.7687637969,
"alphanum_fraction": 0.5220340693
} |
# Notebook from gopala-kr/ds-notebooks
Path: ipython-notebooks/notebooks/libraries/DEAP.ipynb
# DEAP_____no_output_____DEAP is a novel evolutionary computation framework for rapid prototyping and testing of ideas. It seeks to make algorithms explicit and data structures transparent. It works in perfect harmony with parallelisation mechanisms such as multiprocessing and SCOOP. The following documentation presents the key concepts and many features needed to build your own evolutionary algorithms.
Library documentation: <a>http://deap.readthedocs.org/en/master/</a>_____no_output_____## One Max Problem (GA)_____no_output_____This problem is very simple: we search for an individual whose list is filled entirely with 1s. It is widely used in the evolutionary computation community since it is very simple and it illustrates well the potential of evolutionary algorithms._____no_output_____
<code>
import random
from deap import base
from deap import creator
from deap import tools_____no_output_____# creator is a class factory that can build new classes at run-time
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)_____no_output_____# a toolbox stores functions and their arguments
toolbox = base.Toolbox()
# attribute generator
toolbox.register("attr_bool", random.randint, 0, 1)
# structure initializers
toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)_____no_output_____# evaluation function
def evalOneMax(individual):
return sum(individual),_____no_output_____# register the required genetic operators
toolbox.register("evaluate", evalOneMax)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutFlipBit, indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)_____no_output_____random.seed(64)
# instantiate a population
pop = toolbox.population(n=300)
CXPB, MUTPB, NGEN = 0.5, 0.2, 40
# evaluate the entire population
fitnesses = list(map(toolbox.evaluate, pop))
for ind, fit in zip(pop, fitnesses):
ind.fitness.values = fit
print(" Evaluated %i individuals" % len(pop)) Evaluated 300 individuals
# begin the evolution
for g in range(NGEN):
print("-- Generation %i --" % g)
# select the next generation individuals
offspring = toolbox.select(pop, len(pop))
# clone the selected individuals
offspring = list(map(toolbox.clone, offspring))
# apply crossover and mutation on the offspring
for child1, child2 in zip(offspring[::2], offspring[1::2]):
if random.random() < CXPB:
toolbox.mate(child1, child2)
del child1.fitness.values
del child2.fitness.values
for mutant in offspring:
if random.random() < MUTPB:
toolbox.mutate(mutant)
del mutant.fitness.values
# evaluate the individuals with an invalid fitness
invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
fitnesses = map(toolbox.evaluate, invalid_ind)
for ind, fit in zip(invalid_ind, fitnesses):
ind.fitness.values = fit
print(" Evaluated %i individuals" % len(invalid_ind))
# the population is entirely replaced by the offspring
pop[:] = offspring
# gather all the fitnesses in one list and print the stats
fits = [ind.fitness.values[0] for ind in pop]
length = len(pop)
mean = sum(fits) / length
sum2 = sum(x*x for x in fits)
std = abs(sum2 / length - mean**2)**0.5
print(" Min %s" % min(fits))
print(" Max %s" % max(fits))
print(" Avg %s" % mean)
print(" Std %s" % std)-- Generation 0 --
Evaluated 189 individuals
Min 40.0
Max 65.0
Avg 54.7433333333
Std 4.46289766358
-- Generation 1 --
Evaluated 171 individuals
Min 44.0
Max 70.0
Avg 58.48
Std 3.98533980149
-- Generation 2 --
Evaluated 169 individuals
Min 54.0
Max 68.0
Avg 61.6066666667
Std 2.92779021714
-- Generation 3 --
Evaluated 185 individuals
Min 57.0
Max 73.0
Avg 63.82
Std 2.74364720764
-- Generation 4 --
Evaluated 175 individuals
Min 54.0
Max 73.0
Avg 65.67
Std 2.57961883489
-- Generation 5 --
Evaluated 164 individuals
Min 60.0
Max 76.0
Avg 67.5466666667
Std 2.57833710407
-- Generation 6 --
Evaluated 185 individuals
Min 63.0
Max 77.0
Avg 69.0666666667
Std 2.50510589707
-- Generation 7 --
Evaluated 194 individuals
Min 62.0
Max 78.0
Avg 70.78
Std 2.39963886172
-- Generation 8 --
Evaluated 199 individuals
Min 63.0
Max 79.0
Avg 72.3133333333
Std 2.57717330077
-- Generation 9 --
Evaluated 169 individuals
Min 67.0
Max 81.0
Avg 74.0
Std 2.62551582234
-- Generation 10 --
Evaluated 180 individuals
Min 67.0
Max 83.0
Avg 75.9166666667
Std 2.52910831893
-- Generation 11 --
Evaluated 193 individuals
Min 67.0
Max 84.0
Avg 77.5966666667
Std 2.40291258453
-- Generation 12 --
Evaluated 177 individuals
Min 72.0
Max 85.0
Avg 78.97
Std 2.29690371297
-- Generation 13 --
Evaluated 195 individuals
Min 70.0
Max 86.0
Avg 80.13
Std 2.35650164439
-- Generation 14 --
Evaluated 175 individuals
Min 74.0
Max 86.0
Avg 81.3966666667
Std 2.03780655499
-- Generation 15 --
Evaluated 181 individuals
Min 74.0
Max 87.0
Avg 82.33
Std 2.18504767301
-- Generation 16 --
Evaluated 198 individuals
Min 74.0
Max 88.0
Avg 83.4033333333
Std 2.22575580172
-- Generation 17 --
Evaluated 190 individuals
Min 72.0
Max 88.0
Avg 84.14
Std 2.34955314901
-- Generation 18 --
Evaluated 170 individuals
Min 76.0
Max 89.0
Avg 85.1
Std 2.20529665427
-- Generation 19 --
Evaluated 189 individuals
Min 75.0
Max 90.0
Avg 85.77
Std 2.1564863397
-- Generation 20 --
Evaluated 188 individuals
Min 77.0
Max 91.0
Avg 86.4833333333
Std 2.2589943682
-- Generation 21 --
Evaluated 180 individuals
Min 80.0
Max 91.0
Avg 87.24
Std 2.0613264338
-- Generation 22 --
Evaluated 179 individuals
Min 80.0
Max 92.0
Avg 87.95
Std 1.95298916194
-- Generation 23 --
Evaluated 196 individuals
Min 79.0
Max 93.0
Avg 88.42
Std 2.2249194742
-- Generation 24 --
Evaluated 168 individuals
Min 82.0
Max 93.0
Avg 89.2833333333
Std 1.89289607627
-- Generation 25 --
Evaluated 186 individuals
Min 78.0
Max 94.0
Avg 89.7666666667
Std 2.26102238428
-- Generation 26 --
Evaluated 182 individuals
Min 82.0
Max 94.0
Avg 90.4633333333
Std 2.21404356075
-- Generation 27 --
Evaluated 179 individuals
Min 81.0
Max 95.0
Avg 90.8733333333
Std 2.41328729238
-- Generation 28 --
Evaluated 183 individuals
Min 83.0
Max 95.0
Avg 91.7166666667
Std 2.18701978856
-- Generation 29 --
Evaluated 167 individuals
Min 83.0
Max 98.0
Avg 92.3466666667
Std 2.21656390739
-- Generation 30 --
Evaluated 170 individuals
Min 84.0
Max 98.0
Avg 92.9533333333
Std 2.09868742048
-- Generation 31 --
Evaluated 172 individuals
Min 83.0
Max 97.0
Avg 93.5266666667
Std 2.28238666507
-- Generation 32 --
Evaluated 196 individuals
Min 86.0
Max 98.0
Avg 94.28
Std 2.16985406575
-- Generation 33 --
Evaluated 176 individuals
Min 85.0
Max 98.0
Avg 94.9133333333
Std 2.22392046221
-- Generation 34 --
Evaluated 176 individuals
Min 86.0
Max 99.0
Avg 95.6333333333
Std 2.13359373411
-- Generation 35 --
Evaluated 174 individuals
Min 86.0
Max 99.0
Avg 96.2966666667
Std 2.23651266236
-- Generation 36 --
Evaluated 174 individuals
Min 87.0
Max 100.0
Avg 96.5866666667
Std 2.41436442062
-- Generation 37 --
Evaluated 195 individuals
Min 84.0
Max 100.0
Avg 97.3666666667
Std 2.16153237825
-- Generation 38 --
Evaluated 180 individuals
Min 89.0
Max 100.0
Avg 97.7466666667
Std 2.32719191779
-- Generation 39 --
Evaluated 196 individuals
Min 88.0
Max 100.0
Avg 98.1833333333
Std 2.33589145486
best_ind = tools.selBest(pop, 1)[0]
print("Best individual is %s, %s" % (best_ind, best_ind.fitness.values))Best individual is [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], (100.0,)
</code>
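The explicit generational loop above is useful for seeing every step of the algorithm. DEAP also ships a one-call driver, `algorithms.eaSimple`, that runs the same select/mate/mutate/evaluate cycle; a minimal sketch reusing the toolbox defined above (statistics and hall of fame omitted) could look like this:
<code>
from deap import algorithms

# High-level equivalent of the manual loop: eaSimple handles selection,
# variation and evaluation internally for ngen generations.
pop = toolbox.population(n=300)
final_pop, logbook = algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2,
                                         ngen=40, verbose=False)
print(tools.selBest(final_pop, 1)[0].fitness.values)
</code>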
## Symbolic Regression (GP)_____no_output_____Symbolic regression is one of the best known problems in GP. It is commonly used as a tuning problem for new algorithms, but is also widely used with real-life distributions, where other regression methods may not work.
All symbolic regression problems use an arbitrary data distribution, and try to fit the data as accurately as possible with a symbolic formula. Usually, a measure like the RMSE (Root Mean Square Error) is used to measure an individual’s fitness.
In this example, we use a classical distribution, the quartic polynomial (x^4 + x^3 + x^2 + x), a one-dimensional distribution. 20 equidistant points are generated in the range [-1, 1], and are used to evaluate the fitness._____no_output_____
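Concretely, the fitness minimised in the code below is the mean squared error of a candidate expression $f$ over those 20 points $x_i$:

$$\text{fitness}(f) = \frac{1}{20}\sum_{i=1}^{20}\Big(f(x_i) - \big(x_i^4 + x_i^3 + x_i^2 + x_i\big)\Big)^2$$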
<code>
import operator
import math
import random
import numpy
from deap import algorithms
from deap import base
from deap import creator
from deap import tools
from deap import gp
# define a new function for division that guards against divide by 0
def protectedDiv(left, right):
try:
return left / right
except ZeroDivisionError:
return 1_____no_output_____# add aritmetic primitives
pset = gp.PrimitiveSet("MAIN", 1)
pset.addPrimitive(operator.add, 2)
pset.addPrimitive(operator.sub, 2)
pset.addPrimitive(operator.mul, 2)
pset.addPrimitive(protectedDiv, 2)
pset.addPrimitive(operator.neg, 1)
pset.addPrimitive(math.cos, 1)
pset.addPrimitive(math.sin, 1)
# constant terminal
pset.addEphemeralConstant("rand101", lambda: random.randint(-1,1))
# define number of inputs
pset.renameArguments(ARG0='x')_____no_output_____# create fitness and individual objects
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin)_____no_output_____# register evolution process parameters through the toolbox
toolbox = base.Toolbox()
toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2)
toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("compile", gp.compile, pset=pset)
# evaluation function
def evalSymbReg(individual, points):
# transform the tree expression in a callable function
func = toolbox.compile(expr=individual)
# evaluate the mean squared error between the expression
# and the real function : x**4 + x**3 + x**2 + x
sqerrors = ((func(x) - x**4 - x**3 - x**2 - x)**2 for x in points)
return math.fsum(sqerrors) / len(points),
toolbox.register("evaluate", evalSymbReg, points=[x/10. for x in range(-10,10)])
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("mate", gp.cxOnePoint)
toolbox.register("expr_mut", gp.genFull, min_=0, max_=2)
toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset)
# prevent functions from getting too deep/complex
toolbox.decorate("mate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17))
toolbox.decorate("mutate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17))_____no_output_____# compute some statistics about the population
stats_fit = tools.Statistics(lambda ind: ind.fitness.values)
stats_size = tools.Statistics(len)
mstats = tools.MultiStatistics(fitness=stats_fit, size=stats_size)
mstats.register("avg", numpy.mean)
mstats.register("std", numpy.std)
mstats.register("min", numpy.min)
mstats.register("max", numpy.max)_____no_output_____random.seed(318)
pop = toolbox.population(n=300)
hof = tools.HallOfFame(1)
# run the algorithm
pop, log = algorithms.eaSimple(pop, toolbox, 0.5, 0.1, 40, stats=mstats,
halloffame=hof, verbose=True) fitness size
--------------------------------------- -------------------------------
gen nevals avg max min std avg max min std
0 300 2.39949 59.2593 0.165572 4.64122 3.69667 7 2 1.61389
1 146 1.0971 10.1 0.165572 0.845978 3.80667 13 1 1.78586
2 169 0.902365 6.5179 0.165572 0.72362 4.16 13 1 2.0366
3 167 0.852725 9.6327 0.165572 0.869381 4.63667 13 1 2.20408
4 158 0.74829 14.1573 0.165572 1.01281 4.88333 13 1 2.14392
5 160 0.630299 7.90605 0.165572 0.904373 5.52333 14 1 2.09351
6 181 0.495118 4.09456 0.165572 0.524658 6.08333 13 1 1.99409
7 170 0.403873 2.6434 0.165572 0.440596 6.34667 14 1 1.84386
8 173 0.393405 2.9829 0.165572 0.425415 6.37 12 1 1.78132
9 168 0.414299 13.5996 0.165572 0.841226 6.25333 11 2 1.76328
10 142 0.384179 4.07808 0.165572 0.477269 6.25667 13 1 1.78067
11 156 0.459639 19.8316 0.165572 1.47254 6.35333 15 1 2.04983
12 167 0.384348 6.79674 0.165572 0.495807 6.25 13 1 1.92029
13 157 0.42446 11.0636 0.165572 0.818953 6.43667 15 1 2.11959
14 175 0.342257 2.552 0.165572 0.325872 6.23333 15 1 2.14295
15 154 0.442374 13.8349 0.165572 0.950612 6.05667 14 1 1.90266
16 181 0.455697 19.7228 0.101561 1.39528 6.08667 13 1 1.84006
17 178 0.36256 2.54124 0.101561 0.340555 6.24 15 1 2.20055
18 171 0.411532 14.2339 0.101561 0.897785 6.44 15 1 2.2715
19 156 0.43193 15.5923 0.101561 0.9949 6.66667 15 1 2.40185
20 169 0.398163 4.09456 0.0976781 0.450231 6.96667 15 1 2.62022
21 162 0.385774 4.09456 0.0976781 0.421867 7.13 14 1 2.65577
22 162 0.35318 2.55465 0.0253803 0.389453 7.66667 19 2 3.04995
23 164 0.3471 3.66792 0.0253803 0.482334 8.24 21 1 3.48364
24 159 1.46248 331.247 0.0253803 19.0841 9.42667 19 3 3.238
25 164 0.382697 6.6452 0.0173316 0.652247 10.1867 25 1 3.46292
26 139 0.367651 11.9045 0.0173316 0.855067 10.67 19 3 3.32582
27 167 0.345866 6.6452 0.0173316 0.586155 11.4 27 3 3.44384
28 183 0.388404 4.53076 0.0173316 0.58986 11.5767 24 3 3.4483
29 163 0.356009 6.33264 0.0173316 0.563266 12.2433 29 2 4.23211
30 174 0.31506 2.54124 0.0173316 0.412507 12.92 27 3 4.5041
31 206 0.361197 2.9829 0.0173316 0.486155 13.9333 33 1 5.6747
32 168 0.302704 4.01244 0.0173316 0.502277 15.04 31 3 5.40849
33 160 0.246509 3.30873 0.012947 0.433212 16.3967 34 2 5.66092
34 158 0.344791 26.1966 0.012947 1.57277 17.39 43 1 6.13008
35 162 0.212572 2.85856 0.0148373 0.363023 17.64 37 2 6.04349
36 183 0.240268 5.06093 0.0112887 0.482794 17.4333 41 3 6.33184
37 185 0.514635 65.543 0.0103125 3.7864 16.6167 41 1 6.58456
38 134 0.340433 11.2506 0.0103125 0.827213 16.2733 34 1 6.08484
39 158 0.329797 15.8145 4.50668e-33 1.05693 16.4133 34 1 6.09993
40 164 0.306543 14.3573 4.50668e-33 0.947046 17.9033 53 2 8.23695
</code>
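A natural follow-up (not part of the original notebook) is to inspect the best expression kept in the hall of fame and evaluate it as a Python function:
<code>
# Look at the best evolved individual and turn it back into a callable function.
best = hof[0]
print(str(best))                      # the evolved symbolic expression
best_func = toolbox.compile(expr=best)
print([round(best_func(x/10.), 3) for x in range(-3, 4)])  # sample a few points
</code>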
| {
"repository": "gopala-kr/ds-notebooks",
"path": "ipython-notebooks/notebooks/libraries/DEAP.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 1,
"size": 23160,
"hexsha": "d0b5a6e9a6927dca671a11e7b4e4dd99738e13fe",
"max_line_length": 369,
"avg_line_length": 33.6139332366,
"alphanum_fraction": 0.5386873921
} |
# Notebook from tAndreani/scATAC-benchmarking
Path: Extra/Cusanovich_2018_subset/test_blacklist/run_umap_cusanovich2018subset_no_blacklist_filtering.ipynb
<code>
library(data.table)
library(dplyr)
library(Matrix)
library(BuenColors)
library(stringr)
library(cowplot)
library(SummarizedExperiment)
library(chromVAR)
library(BSgenome.Hsapiens.UCSC.hg19)
library(JASPAR2016)
library(motifmatchr)
library(GenomicRanges)
library(irlba)
library(cicero)
library(umap)
library(cisTopic)
library(prabclus)
library(BrockmanR)
library(jackstraw)
library(RColorBrewer)
Attaching package: ‘dplyr’
The following objects are masked from ‘package:data.table’:
between, first, last
The following objects are masked from ‘package:stats’:
filter, lag
The following objects are masked from ‘package:base’:
intersect, setdiff, setequal, union
Loading required package: MASS
Attaching package: ‘MASS’
The following object is masked from ‘package:dplyr’:
select
Loading required package: ggplot2
Attaching package: ‘cowplot’
The following object is masked from ‘package:ggplot2’:
ggsave
Loading required package: GenomicRanges
Loading required package: stats4
Loading required package: BiocGenerics
Loading required package: parallel
Attaching package: ‘BiocGenerics’
The following objects are masked from ‘package:parallel’:
clusterApply, clusterApplyLB, clusterCall, clusterEvalQ,
clusterExport, clusterMap, parApply, parCapply, parLapply,
parLapplyLB, parRapply, parSapply, parSapplyLB
The following objects are masked from ‘package:Matrix’:
colMeans, colSums, rowMeans, rowSums, which
The following objects are masked from ‘package:dplyr’:
combine, intersect, setdiff, union
The following objects are masked from ‘package:stats’:
IQR, mad, sd, var, xtabs
The following objects are masked from ‘package:base’:
anyDuplicated, append, as.data.frame, basename, cbind, colMeans,
colnames, colSums, dirname, do.call, duplicated, eval, evalq,
Filter, Find, get, grep, grepl, intersect, is.unsorted, lapply,
lengths, Map, mapply, match, mget, order, paste, pmax, pmax.int,
pmin, pmin.int, Position, rank, rbind, Reduce, rowMeans, rownames,
rowSums, sapply, setdiff, sort, table, tapply, union, unique,
unsplit, which, which.max, which.min
Loading required package: S4Vectors
Attaching package: ‘S4Vectors’
The following object is masked from ‘package:Matrix’:
expand
The following objects are masked from ‘package:dplyr’:
first, rename
The following objects are masked from ‘package:data.table’:
first, second
The following object is masked from ‘package:base’:
expand.grid
Loading required package: IRanges
Attaching package: ‘IRanges’
The following objects are masked from ‘package:dplyr’:
collapse, desc, slice
The following object is masked from ‘package:data.table’:
shift
Loading required package: GenomeInfoDb
Loading required package: Biobase
Welcome to Bioconductor
Vignettes contain introductory material; view with
'browseVignettes()'. To cite Bioconductor, see
'citation("Biobase")', and for packages 'citation("pkgname")'.
Loading required package: DelayedArray
Loading required package: matrixStats
Attaching package: ‘matrixStats’
The following objects are masked from ‘package:Biobase’:
anyMissing, rowMedians
The following object is masked from ‘package:dplyr’:
count
Loading required package: BiocParallel
Attaching package: ‘DelayedArray’
The following objects are masked from ‘package:matrixStats’:
colMaxs, colMins, colRanges, rowMaxs, rowMins, rowRanges
The following objects are masked from ‘package:base’:
aperm, apply
Loading required package: BSgenome
Loading required package: Biostrings
Loading required package: XVector
Attaching package: ‘Biostrings’
The following object is masked from ‘package:DelayedArray’:
type
The following object is masked from ‘package:base’:
strsplit
Loading required package: rtracklayer
Loading required package: monocle
Loading required package: VGAM
Loading required package: splines
Loading required package: DDRTree
Loading required package: Gviz
Loading required package: grid
Warning message:
"replacing previous import 'GenomicRanges::shift' by 'data.table::shift' when loading 'cisTopic'"Warning message:
"replacing previous import 'data.table::last' by 'dplyr::last' when loading 'cisTopic'"Warning message:
"replacing previous import 'GenomicRanges::union' by 'dplyr::union' when loading 'cisTopic'"Warning message:
"replacing previous import 'GenomicRanges::intersect' by 'dplyr::intersect' when loading 'cisTopic'"Warning message:
"replacing previous import 'GenomicRanges::setdiff' by 'dplyr::setdiff' when loading 'cisTopic'"Warning message:
"replacing previous import 'data.table::first' by 'dplyr::first' when loading 'cisTopic'"Warning message:
"replacing previous import 'data.table::between' by 'dplyr::between' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::failwith' by 'plyr::failwith' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::id' by 'plyr::id' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::summarize' by 'plyr::summarize' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::count' by 'plyr::count' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::desc' by 'plyr::desc' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::mutate' by 'plyr::mutate' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::arrange' by 'plyr::arrange' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::rename' by 'plyr::rename' when loading 'cisTopic'"Warning message:
"replacing previous import 'dplyr::summarise' by 'plyr::summarise' when loading 'cisTopic'"Loading required package: mclust
Package 'mclust' version 5.4.3
Type 'citation("mclust")' for citing this R package in publications.
Loading required package: tsne
</code>
#### define functions_____no_output_____
<code>
read_FM <- function(filename){
df_FM = data.frame(readRDS(filename),stringsAsFactors=FALSE,check.names=FALSE)
rownames(df_FM) <- make.names(rownames(df_FM), unique=TRUE)
df_FM[is.na(df_FM)] <- 0
return(df_FM)
}
run_pca <- function(mat,num_pcs=50,remove_first_PC=FALSE,scale=FALSE,center=FALSE){
set.seed(2019)
mat = as.matrix(mat)
SVD = irlba(mat, num_pcs, num_pcs,scale=scale,center=center)
sk_diag = matrix(0, nrow=num_pcs, ncol=num_pcs)
diag(sk_diag) = SVD$d
if(remove_first_PC){
sk_diag[1,1] = 0
SVD_vd = (sk_diag %*% t(SVD$v))[2:num_pcs,]
}else{
SVD_vd = sk_diag %*% t(SVD$v)
}
return(SVD_vd)
}
elbow_plot <- function(mat,num_pcs=50,scale=FALSE,center=FALSE,title='',width=3,height=3){
set.seed(2019)
mat = data.matrix(mat)
SVD = irlba(mat, num_pcs, num_pcs,scale=scale,center=center)
options(repr.plot.width=width, repr.plot.height=height)
df_plot = data.frame(PC=1:num_pcs, SD=SVD$d);
# print(SVD$d[1:num_pcs])
p <- ggplot(df_plot, aes(x = PC, y = SD)) +
geom_point(col="#cd5c5c",size = 1) +
ggtitle(title)
return(p)
}
run_umap <- function(fm_mat){
umap_object = umap(t(fm_mat),random_state = 2019)
df_umap = umap_object$layout
return(df_umap)
}
plot_umap <- function(df_umap,labels,title='UMAP',colormap=colormap){
set.seed(2019)
df_umap = data.frame(cbind(df_umap,labels),stringsAsFactors = FALSE)
colnames(df_umap) = c('umap1','umap2','celltype')
df_umap$umap1 = as.numeric(df_umap$umap1)
df_umap$umap2 = as.numeric(df_umap$umap2)
options(repr.plot.width=4, repr.plot.height=4)
p <- ggplot(shuf(df_umap), aes(x = umap1, y = umap2, color = celltype)) +
geom_point(size = 1) + scale_color_manual(values = colormap) +
ggtitle(title)
return(p)
}_____no_output_____
</code>
### Input_____no_output_____
<code>
workdir = '../output/'
path_umap = paste0(workdir,'umap_rds/')
system(paste0('mkdir -p ',path_umap))
path_fm = paste0(workdir,'feature_matrices/')_____no_output_____metadata <- read.table('../input/metadata.tsv',
header = TRUE,
stringsAsFactors=FALSE,quote="",row.names=1)_____no_output_____list.files(path_fm,pattern="^FM*")_____no_output_____# read in feature matrices and double check if cell names of feature matrices are consistent with metadata
flag_identical = c()
for (filename in list.files(path_fm,pattern="^FM*")){
filename_split = unlist(strsplit(sub('\\.rds$', '', filename),'_'))
method_i = filename_split[2]
if(method_i == 'chromVAR'){
method_i = paste(filename_split[2],filename_split[4],sep='_')
}
print(paste0('Read in ','fm_',method_i))
assign(paste0('fm_',method_i),read_FM(paste0(path_fm,filename)))
#check if column names are the same
flag_identical[[method_i]] = identical(colnames(eval(as.name(paste0('fm_',method_i)))),
rownames(metadata))
}[1] "Read in fm_SCRAT"
[1] "Read in fm_SnapATAC"
flag_identical_____no_output_____all(flag_identical)_____no_output_____labels = metadata$label
num_colors = length(unique(labels))
colormap = colorRampPalette(brewer.pal(8, "Dark2"))(num_colors)
names(colormap) = unique(metadata$label)_____no_output_____head(labels)_____no_output_____
</code>
### SnapATAC_____no_output_____
<code>
df_umap_SnapATAC <- run_umap(fm_SnapATAC)_____no_output_____head(df_umap_SnapATAC)_____no_output_____p_SnapATAC <- plot_umap(df_umap_SnapATAC,labels = labels,colormap = colormap,title='SnapATAC')
p_SnapATAC_____no_output_____
</code>
### SCRAT_____no_output_____
<code>
df_umap_SCRAT <- run_umap(fm_SCRAT)_____no_output_____p_SCRAT <- plot_umap(df_umap_SCRAT,labels = labels,colormap = colormap,title='SCRAT')
p_SCRAT_____no_output_____
</code>
#### Save feature matrices and UMAP coordinates_____no_output_____
<code>
dataset = 'cusanovich2018subset_no_blacklist_filtering'_____no_output_____saveRDS(df_umap_SnapATAC,paste0(path_umap,'df_umap_SnapATAC.rds'))
saveRDS(df_umap_SCRAT,paste0(path_umap,'df_umap_SCRAT.rds'))_____no_output_____save.image(file = 'run_umap_cusanovich2018subset_no_blacklist_filtering.RData')_____no_output_____fig_width = 8
fig_height = 4
options(repr.plot.width=fig_width, repr.plot.height=fig_height)
combined_fig = cowplot::plot_grid(p_SnapATAC+theme(legend.position = "none"),
p_SCRAT+theme(legend.position = "none"),
labels = "",nrow = 1)_____no_output_____combined_fig_____no_output_____cowplot::ggsave(combined_fig,filename = "Cusanovich_2018_ssubset_no_blacklist_filtering.pdf", width = fig_width, height = fig_height)_____no_output_____cowplot::ggsave(p_SCRAT ,filename = "cusanovich_legend.pdf", width = fig_width, height = fig_height)_____no_output_____
</code>
| {
"repository": "tAndreani/scATAC-benchmarking",
"path": "Extra/Cusanovich_2018_subset/test_blacklist/run_umap_cusanovich2018subset_no_blacklist_filtering.ipynb",
"matched_keywords": [
"Monocle"
],
"stars": 2,
"size": 355943,
"hexsha": "d0b667bbc0faab33851de37119144c01f94bb7df",
"max_line_length": 167112,
"avg_line_length": 465.2849673203,
"alphanum_fraction": 0.9268787418
} |
# Notebook from sju-chem264-2019/9-19-2019-lecture-deannapatti
Path: 9-19-2019 - Lecture Notebook.ipynb
# Plotting and Functions_____no_output_____This notebook will work through how to plot data and how to define functions. Throughout the lecture we will take a few moments to plot different functions and see how they depend on their parameters._____no_output_____## Plotting in Python: Matplotlib _____no_output_____
<code>
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp_____no_output_____
</code>
Pyplot is a powerful plotting library that can be used to make publication-quality plots. It is also useful for quickly plotting the results of a calculation.
This is a quick demonstration of its use.
Note: when you import a library with `import matplotlib.pyplot as plt`, the way you use it is to call `plt.function()` where `function()` is whatever you are trying to call from the library_____no_output_____
<code>
# Define x and y values for some function
x = [i for i in range(20)]
y1 = [i**2 for i in x]
y2 = [i**3 for i in x]_____no_output_____
</code>
The method used above to make the lists (a list comprehension) is considered very *pythonic*. It works the same as a loop, but outputs all the results into a list. The left-hand part is what the list elements will be and the right-hand side is the way the loop will work; an equivalent explicit loop is sketched below._____no_output_____When you use pyplot to make a plot, you can add more than one data set to the figure until you render the plot. Once you render the plot it resets._____no_output_____
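Here is the equivalent explicit loop for the `y1` comprehension above, just to make the comparison concrete:
<code>
# The list comprehension above is shorthand for an explicit loop like this one
y1_loop = []
for i in x:
    y1_loop.append(i**2)
print(y1_loop == y1)   # True: both build the same list
</code>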
<code>
plt.plot(x,y1)
plt.plot(x,y2)
plt.xlabel('X', fontsize=24)
plt.ylabel('Y', fontsize=24)
plt.legend(['Quadratic', 'Cubic'], loc=0)
plt.show()_____no_output_____
</code>
We can also use NumPy functions to make our plots. NumPy is a very powerful math library._____no_output_____
<code>
# linspace will make an array of values from initial to final with however many points you want
# this example goes from 0 to 1.0 with 20 points
x=np.linspace(0,1.0,20)
print(x)_____no_output_____exp_func=np.exp(-2*np.pi*x)
print(exp_func)_____no_output_____plt.plot(x,exp_func, color="black")
plt.xlabel('x', fontsize=24)
plt.ylabel("y(x)", fontsize=24)
plt.show()_____no_output_____
</code>
All aspects of the plot can be changed. The best way to figure out what you want to do is to go to the Matplotlib gallery and choose an image that looks like what you are trying to do.
https://matplotlib.org/gallery/index.html_____no_output_____### Example: Scatter plot with histograms_____no_output_____
<code>
import numpy as np
#Fixing random state for reproducibility
np.random.seed(19680801)
# the random data
x = np.random.randn(1000)
y = np.random.randn(1000)
# definitions for the axes
left, width = 0.1, 0.65
bottom, height = 0.1, 0.65
spacing = 0.005
rect_scatter = [left, bottom, width, height]
rect_histx = [left, bottom + height + spacing, width, 0.2]
rect_histy = [left + width + spacing, bottom, 0.2, height]
# start with a rectangular Figure
plt.figure(figsize=(8, 8))
ax_scatter = plt.axes(rect_scatter)
ax_scatter.tick_params(direction='in', top=True, right=True)
ax_histx = plt.axes(rect_histx)
ax_histx.tick_params(direction='in', labelbottom=False)
ax_histy = plt.axes(rect_histy)
ax_histy.tick_params(direction='in', labelleft=False)
# the scatter plot:
ax_scatter.scatter(x, y)
# now determine nice limits by hand:
binwidth = 0.25
lim = np.ceil(np.abs([x, y]).max() / binwidth) * binwidth
ax_scatter.set_xlim((-lim, lim))
ax_scatter.set_ylim((-lim, lim))
bins = np.arange(-lim, lim + binwidth, binwidth)
ax_histx.hist(x, bins=bins)
ax_histy.hist(y, bins=bins, orientation='horizontal')
ax_histx.set_xlim(ax_scatter.get_xlim())
ax_histy.set_ylim(ax_scatter.get_ylim())
plt.show()_____no_output_____
</code>
I don't have to be an expert in making that kind of plot. I just have to understand and guess enough to figure it out. I also google things I don't know.
https://www.google.com/search?client=firefox-b-1-d&q=pyplot+histogram+change+color
https://stackoverflow.com/questions/42172440/python-matplotlib-histogram-color?rq=1
https://matplotlib.org/examples/color/named_colors.html
Then I can make small changes to have the plot look how I want it to look
Notice below I changed
`ax_scatter.scatter(x, y, color="purple")`,
`ax_histx.hist(x, bins=bins, color = "skyblue")`,
`ax_histy.hist(y, bins=bins, orientation='horizontal', color="salmon")`_____no_output_____
<code>
#Fixing random state for reproducibility
np.random.seed(19680801)
# the random data
x = np.random.randn(1000)
y = np.random.randn(1000)
# definitions for the axes
left, width = 0.1, 0.65
bottom, height = 0.1, 0.65
spacing = 0.005
rect_scatter = [left, bottom, width, height]
rect_histx = [left, bottom + height + spacing, width, 0.2]
rect_histy = [left + width + spacing, bottom, 0.2, height]
# start with a rectangular Figure
plt.figure(figsize=(8, 8))
ax_scatter = plt.axes(rect_scatter)
ax_scatter.tick_params(direction='in', top=True, right=True)
ax_histx = plt.axes(rect_histx)
ax_histx.tick_params(direction='in', labelbottom=False)
ax_histy = plt.axes(rect_histy)
ax_histy.tick_params(direction='in', labelleft=False)
# the scatter plot:
ax_scatter.scatter(x, y, color="purple")
# now determine nice limits by hand:
binwidth = 0.25
lim = np.ceil(np.abs([x, y]).max() / binwidth) * binwidth
ax_scatter.set_xlim((-lim, lim))
ax_scatter.set_ylim((-lim, lim))
bins = np.arange(-lim, lim + binwidth, binwidth)
ax_histx.hist(x, bins=bins, color = "skyblue")
ax_histy.hist(y, bins=bins, orientation='horizontal', color="salmon")
ax_histx.set_xlim(ax_scatter.get_xlim())
ax_histy.set_ylim(ax_scatter.get_ylim())
plt.show()_____no_output_____
</code>
Notice how I changed the colors on the plot based on what I found on Stack Exchange. The way to solve issues in the course and computational work is to google them._____no_output_____## Plotting Exercise 1_____no_output_____Find a plot from the gallery that you like. Then make some sort of noticeable change to it._____no_output_____
<code>
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
points = np.ones(5) # Draw 5 points for each line
marker_style = dict(color='tab:blue', linestyle=':', marker='o',
markersize=15, markerfacecoloralt='tab:red')
fig, ax = plt.subplots()
# Plot all fill styles.
for y, fill_style in enumerate(Line2D.fillStyles):
ax.text(-0.5, y, repr(fill_style),
horizontalalignment='center', verticalalignment='center')
ax.plot(y * points, fillstyle=fill_style, **marker_style)
ax.set_axis_off()
ax.set_title('fill style')
plt.show()_____no_output_____import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
points = np.ones(5) # Draw 5 points for each line
marker_style = dict(color='tab:green', linestyle=':', marker='o',
markersize=15, markerfacecoloralt='tab:purple')
fig, ax = plt.subplots()
# Plot all fill styles.
for y, fill_style in enumerate(Line2D.fillStyles):
ax.text(-0.5, y, repr(fill_style),
horizontalalignment='center', verticalalignment='center')
ax.plot(y * points, fillstyle=fill_style, **marker_style)
ax.set_axis_off()
ax.set_title('fill style')
plt.show()_____no_output_____
</code>
## Plotting Exercise 2_____no_output_____Plot the following functions on the same plot from $ -2\pi $ to $2\pi$
$$ \sin(2\pi x+\pi)$$
$$ \cos(2\pi x+\pi)$$
$$\sin(2\pi x+\pi)+\cos(2\pi x+\pi)$$_____no_output_____This might be useful:
https://docs.scipy.org/doc/numpy/reference/generated/numpy.sin.html
https://docs.scipy.org/doc/numpy/reference/generated/numpy.cos.html#numpy.cos_____no_output_____
<code>
np.sin(np.pi/2.)
_____no_output_____np.sin(np.array((0., 30., 45., 60., 90.)) * np.pi / 180. )
_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-np.pi, np.pi, 201)
plt.plot(x, np.sin(x))
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-2*np.pi, 2*np.pi, 201)
plt.plot(x, np.sin(x))
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-2*np.pi, 2*np.pi, 201)
plt.plot(x, np.sin(2*np.pi*x+np.pi))
plt.plot(x, np.cos(2*np.pi*x+np.pi))
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-2*np.pi, 2*np.pi, 201)
plt.plot(x, np.cos(2*np.pi*x+np.pi))
plt.xlabel('Angle [rad]')
plt.ylabel('cos(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-2*np.pi, 2*np.pi, 201)
plt.plot(x, np.sin(2*np.pi*x+np.pi), color="blue")
plt.plot(x, np.cos(2*np.pi*x+np.pi), color="red")
plt.plot(x, np.sin(2*np.pi*x+np.pi)+np.cos(2*np.pi*x+np.pi), color="green")
plt.xlabel('x')
plt.ylabel('y(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-2*np.pi, 2*np.pi, 201)
plt.plot(x, np.sin(2*np.pi*x+np.pi), color="black")
plt.plot(x, np.cos(2*np.pi*x+np.pi), color="red")
plt.plot(x, np.sin(2*np.pi*x+np.pi)+np.cos(2*np.pi*x+np.pi), color="gray")
plt.xlabel('x')
plt.ylabel('y(x)')
plt.axis('tight')
plt.show()_____no_output_____
</code>
# Lecture plots_____no_output_____Periodically during lecture we will take a pause to plot some of the interesting functions that we use in class._____no_output_____## Classical wavefunctions
The following plot shows the spatial component of the standard wavefunction with a wavelength of $\lambda=\text{1.45 m}$ and a relative amplitude of $A=1$ when the time $t=0$ and the phase $\phi=1.0$._____no_output_____
<code>
x=np.linspace(0,3.0,100)
sinx=np.sin(2*np.pi*x+0+1)
plt.plot(x,sinx, color="black")
plt.xlabel('x', fontsize=24)
plt.ylabel("y(x)", fontsize=24)
plt.show()_____no_output_____
</code>
Make a new figure where you plot the same wave function at three time points in the future. Assume the frequency is $\nu=0.1 \text{ ms / s}$. Use a different color for each plot._____no_output_____## Orthogonality_____no_output_____Graphically show that the following two functions are orthogonal on the interval $-3\pi$ to $3\pi$
$$ \sin(x) \text{ and } \cos(3x)$$
Plot both functions together, then plot the product of both functions and explain why it is orthogonal_____no_output_____
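For reference, the analytic result behind this exercise follows from the product-to-sum identity:

$$\int_{-3\pi}^{3\pi} \sin(x)\cos(3x)\,dx = \frac{1}{2}\int_{-3\pi}^{3\pi} \big[\sin(4x) - \sin(2x)\big]\,dx = 0$$

Each sine term is an odd function integrated over a symmetric interval (and covers whole periods), so the integral vanishes and the two functions are orthogonal on this interval.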
<code>
import matplotlib.pylab as plt
x = np.linspace(-3*np.pi, 3*np.pi, 201)
plt.plot(x, np.sin(x))
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-3*np.pi, 3*np.pi, 201)
plt.plot(x, np.sin(x))
plt.plot(x, np.cos(3*x))
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-3*np.pi, 3*np.pi, 201)
prod=np.sin(x)*np.cos(3*x)
plt.plot(x, np.sin(x))
plt.plot(x, np.cos(3*x))
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____import matplotlib.pylab as plt
x = np.linspace(-3*np.pi, 3*np.pi, 201)
prod=np.sin(x)*np.cos(3*x)
plt.plot(x, prod, color="blue")
plt.xlabel('Angle [rad]')
plt.ylabel('sin(x)')
plt.axis('tight')
plt.show()_____no_output_____prod=np.sin(x)*np.cos(3*x)
_____no_output_____prod=np.sin(x)*np.cos(3*x)
x = np.linspace(-3*np.pi, 3*np.pi, 201)
exp_func=prod
np.trapz(exp_func,x)_____no_output_____
</code>
Use the numpy trapezoid rule integrator to show that the two functions are orthogonal
`np.trapz(y,x)`
https://docs.scipy.org/doc/numpy/reference/generated/numpy.trapz.html_____no_output_____
<code>
# Example code
x=np.linspace(0,1.0,20)
exp_func=np.exp(-2*np.pi*x)
np.trapz(exp_func,x)_____no_output_____# Your code here_____no_output_____
</code>
| {
"repository": "sju-chem264-2019/9-19-2019-lecture-deannapatti",
"path": "9-19-2019 - Lecture Notebook.ipynb",
"matched_keywords": [
"Salmon"
],
"stars": null,
"size": 453303,
"hexsha": "d0b866bf452e042623f3b6802297b8949ab5f4ea",
"max_line_length": 59152,
"avg_line_length": 492.7206521739,
"alphanum_fraction": 0.9457073966
} |
# Notebook from ML-Bioinfo-CEITEC/genomic_benchmarks
Path: docs/human_ensembl_regulatory/create_datasets.ipynb
# Prepare environment_____no_output_____
<code>
!pip install git+https://github.com/katarinagresova/ensembl_scraper.git@6d3bba8e6be7f5ead58a3bbaed6a4e8cd35e62fdCollecting git+https://github.com/katarinagresova/ensembl_scraper.git@6d3bba8e6be7f5ead58a3bbaed6a4e8cd35e62fd
Cloning https://github.com/katarinagresova/ensembl_scraper.git (to revision 6d3bba8e6be7f5ead58a3bbaed6a4e8cd35e62fd) to /tmp/pip-req-build-fz97hoif
Running command git clone --filter=blob:none -q https://github.com/katarinagresova/ensembl_scraper.git /tmp/pip-req-build-fz97hoif
Running command git rev-parse -q --verify 'sha^6d3bba8e6be7f5ead58a3bbaed6a4e8cd35e62fd'
Running command git fetch -q https://github.com/katarinagresova/ensembl_scraper.git 6d3bba8e6be7f5ead58a3bbaed6a4e8cd35e62fd
Resolved https://github.com/katarinagresova/ensembl_scraper.git to commit 6d3bba8e6be7f5ead58a3bbaed6a4e8cd35e62fd
Preparing metadata (setup.py) ... [?25ldone
[?25hRequirement already satisfied: bio in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.3.3)
Requirement already satisfied: biopython in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.79)
Requirement already satisfied: certifi in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (2021.10.8)
Requirement already satisfied: charset-normalizer in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (2.0.11)
Requirement already satisfied: idna in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (3.3)
Requirement already satisfied: joblib in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.1.0)
Requirement already satisfied: numpy in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.22.2)
Requirement already satisfied: pandas in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.4.0)
Requirement already satisfied: plac in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.3.4)
Requirement already satisfied: pyfiglet in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (0.8.post1)
Requirement already satisfied: python-dateutil in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (2.8.2)
Requirement already satisfied: pytz in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (2021.3)
Requirement already satisfied: PyYAML>=5.4.1 in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (6.0)
Requirement already satisfied: requests in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (2.27.1)
Requirement already satisfied: scikit-learn in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.0.2)
Requirement already satisfied: scipy in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.8.0)
Requirement already satisfied: six in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.16.0)
Requirement already satisfied: threadpoolctl in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (3.1.0)
Requirement already satisfied: tqdm in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (4.62.3)
Requirement already satisfied: twobitreader in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (3.1.7)
Requirement already satisfied: urllib3 in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from scraper==0.0.1) (1.26.8)
Requirement already satisfied: mygene in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from bio->scraper==0.0.1) (3.2.2)
Requirement already satisfied: biothings-client>=0.2.6 in /home/katarina/git/genomic_benchmarks/venv/lib/python3.8/site-packages (from mygene->bio->scraper==0.0.1) (0.2.6)
[33mWARNING: You are using pip version 21.3.1; however, version 22.0.3 is available.
You should consider upgrading via the '/home/katarina/git/genomic_benchmarks/venv/bin/python -m pip install --upgrade pip' command.[0m
</code>
# Create config file_____no_output_____
<code>
import yaml
config = {
"root_dir": "../../datasets/",
"organisms": {
"homo_sapiens": {
"regulatory_feature"
}
}
}
user_config = 'user_config.yaml'
with open(user_config, 'w') as handle:
yaml.dump(config, handle)_____no_output_____
</code>
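To double-check what was written, the file can simply be read back; this cell is just a sanity check and is not required by the scraper:
<code>
# Print the generated YAML so we can confirm the structure matches the dictionary above.
with open(user_config) as handle:
    print(handle.read())
</code>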
# Prepare directories_____no_output_____
<code>
from pathlib import Path
BASE_FILE_PATH = Path("../../datasets/human_ensembl_regulatory/")
# copied from https://stackoverflow.com/a/57892171
def rm_tree(pth: Path):
for child in pth.iterdir():
if child.is_file():
child.unlink()
else:
rm_tree(child)
pth.rmdir()
if BASE_FILE_PATH.exists():
rm_tree(BASE_FILE_PATH)_____no_output_____
</code>
# Run tool_____no_output_____
<code>
!python -m scraper.ensembl_scraper -c user_config.yamlProcessing organisms: 0%| | 0/1 [00:00<?, ?it/s]
Processing feature files: 0%| | 0/1 [00:00<?, ?it/s][AINFO:root:download_file(): Going to download file from path ftp://ftp.ensembl.org/pub/release-100/mysql/regulation_mart_100/hsapiens_regulatory_feature__regulatory_feature__main.txt.gz
INFO:root:download_file(): File downloaded to path ../../datasets//tmp//homo_sapiens_regulatory_feature.txt.gz.
INFO:root:parse_feature_file(): Going to parse file ../../datasets//tmp//homo_sapiens_regulatory_feature.txt.gz
INFO:root:parse_feature_file(): Done parsing file ../../datasets//tmp//homo_sapiens_regulatory_feature.txt.gz
Processing feature types: 0%| | 0/6 [00:00<?, ?it/s][A[AINFO:root:find_sequences(): Going to find sequences based on genomic loci.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 downloaded to path ../../datasets//tmp/hg38.2bit.
INFO:root:find_sequences(): Done finding sequences.
INFO:root:remove_low_quality(): Going to preprocess sequences.
INFO:root:remove_low_quality(): Original number of sequences: 141250
INFO:root:remove_low_quality(): Number of sequences after contigs rejection: 141250
INFO:root:remove_low_quality(): Number of sequences after outlier rejection: 123915
INFO:root:remove_low_quality(): Number of sequences after Ns rejection: 123909
INFO:root:remove_low_quality(): Done preprocessing sequences.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
Processing feature types: 17%|██▌ | 1/6 [46:10<3:50:52, 2770.60s/it]
INFO:root:find_sequences(): Going to find sequences based on genomic loci.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:find_sequences(): Done finding sequences.
INFO:root:remove_low_quality(): Going to preprocess sequences.
INFO:root:remove_low_quality(): Original number of sequences: 177376
INFO:root:remove_low_quality(): Number of sequences after contigs rejection: 177376
INFO:root:remove_low_quality(): Number of sequences after outlier rejection: 152129
INFO:root:remove_low_quality(): Number of sequences after Ns rejection: 152106
INFO:root:remove_low_quality(): Done preprocessing sequences.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
Processing feature types: 33%|████▎ | 2/6 [1:17:19<2:29:21, 2240.25s/it]
INFO:root:find_sequences(): Going to find sequences based on genomic loci.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:find_sequences(): Done finding sequences.
INFO:root:remove_low_quality(): Going to preprocess sequences.
INFO:root:remove_low_quality(): Original number of sequences: 132592
INFO:root:remove_low_quality(): Number of sequences after contigs rejection: 132592
INFO:root:remove_low_quality(): Number of sequences after outlier rejection: 106894
INFO:root:remove_low_quality(): Number of sequences after Ns rejection: 106890
INFO:root:remove_low_quality(): Done preprocessing sequences.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
Processing feature types: 50%|██████▌ | 3/6 [1:37:08<1:28:00, 1760.04s/it]
INFO:root:find_sequences(): Going to find sequences based on genomic loci.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:find_sequences(): Done finding sequences.
INFO:root:remove_low_quality(): Going to preprocess sequences.
INFO:root:remove_low_quality(): Original number of sequences: 97099
INFO:root:remove_low_quality(): Number of sequences after contigs rejection: 96572
INFO:root:remove_low_quality(): Number of sequences after outlier rejection: 87381
INFO:root:remove_low_quality(): Number of sequences after Ns rejection: 87378
INFO:root:remove_low_quality(): Done preprocessing sequences.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
Processing feature types: 67%|██████████ | 4/6 [1:51:27<46:48, 1404.49s/it]
INFO:root:find_sequences(): Going to find sequences based on genomic loci.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:find_sequences(): Done finding sequences.
INFO:root:remove_low_quality(): Going to preprocess sequences.
INFO:root:remove_low_quality(): Original number of sequences: 35191
INFO:root:remove_low_quality(): Number of sequences after contigs rejection: 35191
INFO:root:remove_low_quality(): Number of sequences after outlier rejection: 32260
INFO:root:remove_low_quality(): Number of sequences after Ns rejection: 32258
INFO:root:remove_low_quality(): Done preprocessing sequences.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
Processing feature types: 83%|█████████████▎ | 5/6 [1:54:37<16:06, 966.54s/it]
INFO:root:find_sequences(): Going to find sequences based on genomic loci.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:find_sequences(): Done finding sequences.
INFO:root:remove_low_quality(): Going to preprocess sequences.
INFO:root:remove_low_quality(): Original number of sequences: 30436
INFO:root:remove_low_quality(): Number of sequences after contigs rejection: 29820
INFO:root:remove_low_quality(): Number of sequences after outlier rejection: 25816
INFO:root:remove_low_quality(): Number of sequences after Ns rejection: 25816
INFO:root:remove_low_quality(): Done preprocessing sequences.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
INFO:root:download_2bit_file(): Going to download 2bit file hg38
INFO:root:download_2bit_file(): File for hg38 already exists. Not going to download.
Processing feature types: 100%|███████████████| 6/6 [1:56:39<00:00, 1166.62s/it]
Processing feature files: 100%|███████████████| 1/1 [1:56:59<00:00, 7019.15s/it]
Processing organisms: 100%|███████████████████| 1/1 [1:56:59<00:00, 7019.15s/it]
</code>
# Reformatting_____no_output_____
<code>
!mkdir -p ../../datasets/human_ensembl_regulatory/train
!mkdir -p ../../datasets/human_ensembl_regulatory/test_____no_output_____!mv ../../datasets/homo_sapiens_regulatory_feature_open_chromatin_region/train/positive.csv ../../datasets/human_ensembl_regulatory/train/ocr.csv
!mv ../../datasets/homo_sapiens_regulatory_feature_open_chromatin_region/test/positive.csv ../../datasets/human_ensembl_regulatory/test/ocr.csv
!mv ../../datasets/homo_sapiens_regulatory_feature_promoter/train/positive.csv ../../datasets/human_ensembl_regulatory/train/promoter.csv
!mv ../../datasets/homo_sapiens_regulatory_feature_promoter/test/positive.csv ../../datasets/human_ensembl_regulatory/test/promoter.csv
!mv ../../datasets/homo_sapiens_regulatory_feature_enhancer/train/positive.csv ../../datasets/human_ensembl_regulatory/train/enhancer.csv
!mv ../../datasets/homo_sapiens_regulatory_feature_enhancer/test/positive.csv ../../datasets/human_ensembl_regulatory/test/enhancer.csv_____no_output_____import pandas as pd

# Split genomic intervals longer than max_len into roughly equal-sized pieces
def chop_sequnces(file_path, max_len):
df = pd.read_csv(file_path)
df_array = df.values
new_df_array = []
index = 0
for row in df_array:
splits = ((row[3] - row[2]) // max_len) + 1
if splits == 1:
new_df_array.append([index, row[1], row[2], row[3], row[4]])
index += 1
elif splits == 2:
length = (row[3] - row[2]) // 2
new_df_array.append([
index,
row[1],
row[2],
row[2] + length,
row[4]
])
index += 1
new_df_array.append([
index,
row[1],
row[2] + length + 1,
row[3],
row[4]
])
index += 1
else:
length = (row[3] - row[2]) // splits
new_df_array.append([
index,
row[1],
row[2],
row[2] + length,
row[4]
])
index += 1
for i in range(1, splits - 1):
new_df_array.append([
index,
row[1],
row[2] + i*length + 1,
row[2] + (i + 1)*length,
row[4]
])
index += 1
new_df_array.append([
index,
row[1],
row[2] + (splits - 1)*length + 1,
row[3],
row[4]
])
index += 1
new_df = pd.DataFrame(new_df_array, columns=df.columns)
new_df.to_csv(file_path, index=False)_____no_output_____chop_sequnces("../../datasets/human_ensembl_regulatory/train/promoter.csv", 700)
chop_sequnces("../../datasets/human_ensembl_regulatory/test/promoter.csv", 700)_____no_output_____!find ../../datasets/human_ensembl_regulatory/ -type f -name "*.csv" -exec gzip {} \;_____no_output_____!mv ../../datasets/homo_sapiens_regulatory_feature_enhancer/metadata.yaml ../../datasets/human_ensembl_regulatory/metadata.yaml_____no_output_____with open("../../datasets/human_ensembl_regulatory/metadata.yaml", "r") as stream:
try:
config = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
config_____no_output_____new_config = {
'classes' : {
'ocr': {
'type': config['classes']['positive']['type'],
'url': config['classes']['positive']['url'],
'extra_processing': 'ENSEMBL_HUMAN_GENOME'
},
'promoter': {
'type': config['classes']['positive']['type'],
'url': config['classes']['positive']['url'],
'extra_processing': 'ENSEMBL_HUMAN_GENOME'
},
'enhancer': {
'type': config['classes']['positive']['type'],
'url': config['classes']['positive']['url'],
'extra_processing': 'ENSEMBL_HUMAN_GENOME'
}
},
'version': config['version']
}
new_config _____no_output_____with open("../../datasets/human_ensembl_regulatory/metadata.yaml", 'w') as handle:
yaml.dump(new_config, handle)_____no_output_____
</code>
# Cleaning_____no_output_____
<code>
!rm user_config.yaml
!rm -rf ../../datasets/tmp/
!rm -rf ../../datasets/homo_sapiens_regulatory_feature_CTCF_binding_site
!rm -rf ../../datasets/homo_sapiens_regulatory_feature_enhancer
!rm -rf ../../datasets/homo_sapiens_regulatory_feature_promoter
!rm -rf ../../datasets/homo_sapiens_regulatory_feature_promoter_flanking_region
!rm -rf ../../datasets/homo_sapiens_regulatory_feature_TF_binding_site
!rm -rf ../../datasets/homo_sapiens_regulatory_feature_open_chromatin_region
_____no_output_____
</code>
# Testing_____no_output_____
<code>
from genomic_benchmarks.loc2seq import download_dataset
download_dataset("human_ensembl_regulatory", local_repo=True)Reference /home/katarina/.genomic_benchmarks/fasta/Homo_sapiens.GRCh38.dna.toplevel.fa.gz already exists. Skipping.
from genomic_benchmarks.data_check import info
info("human_ensembl_regulatory", 0, local_repo=True)Dataset `human_ensembl_regulatory` has 3 classes: enhancer, ocr, promoter.
The length of genomic intervals ranges from 71 to 802, with average 429.91753643694585 and median 401.0.
Totally 289061 sequences have been found, 231348 for training and 57713 for testing.
</code>
| {
"repository": "ML-Bioinfo-CEITEC/genomic_benchmarks",
"path": "docs/human_ensembl_regulatory/create_datasets.ipynb",
"matched_keywords": [
"BioPython"
],
"stars": 5,
"size": 27679,
"hexsha": "d0b8d40d9611ce8bc585c1ad34653d4d6d55bcf6",
"max_line_length": 283,
"avg_line_length": 43.2484375,
"alphanum_fraction": 0.5924708263
} |
# Notebook from taebinkim7/py_jive
Path: doc/explanation/jive_explanation.ipynb
# **JIVE: Joint and Individual Variation Explained**_____no_output_____JIVE (Joint and Individual Variation Explained) is a dimensional reduction algorithm that can be used when there are multiple data matrices (data blocks). The multiple data block setting means there are $K$ different data matrices, with the same number of observations $n$ and (possibly) different numbers of variables ($d_1, \dots, d_k$). JIVE finds modes of variation which are common (joint) to all $K$ data blocks and modes of individual variation which are specific to each block. For a detailed discussion of JIVE see [Angle-Based Joint and Individual Variation Explained](https://arxiv.org/pdf/1704.02060.pdf).[^1]
For a concrete example, consider a two-block setting from a medical study. Suppose there are $n=500$ patients (observations). For each patient we have $d_1 = 100$ bio-medical variables (e.g. height, weight, etc.). Additionally we have $d_2 = 10,000$ gene expression measurements for each patient.
## **The JIVE decomposition**
Suppose we have $K$ data matrices (blocks) with the same number of observations, but possibly different numbers of variables; in particular let $X^{(1)}, \dots, X^{(K)}$ where $X^{(k)} \in \mathbb{R}^{n \times d_k}$. JIVE will then decompose each matrix into three components: joint signal, individual signal and noise
\begin{equation}
X^{(k)} = J^{(k)} + I^{(k)} + E^{(k)}
\end{equation}
where $J^{(k)}$ is the joint signal estimate, $I^{(k)}$ is the individual signal estimate and $E^{(k)}$ is the noise estimate (each of these matrices must have the same shape as the original data block: $\mathbb{R}^{n \times d_k}$). Note: **we assume each data matrix** $X^{(k)}$ **has been column mean centered**.
The matrices satisfy the following constraints:
1. The joint matrices have a common rank: $rk(J^{(k)}) = r_{joint}$ for $k=1, \dots, K$.
2. The individual matrices have block specific ranks $rk(I^{(k)}) = r_{individual}^{(k)}$.
3. The columns of the joint matrices share a common space called the joint score space (a subspace of $\mathbb{R}^n$); in particular the $\text{col-span}(J^{(1)}) = \dots = \text{col-span}(J^{(K)})$ (hence the name joint).
4. Each individual score subspace (of $\mathbb{R}^n$) is orthogonal to the joint score subspace; in particular $\text{col-span}(J^{(k)}) \perp \text{col-span}(I^{(k)})$ for $k=1, \dots, K$.
Note that JIVE may be more natural if we think about data matrices as subspaces of $\mathbb{R}^n$ (the score space perspective). Typically we think of a data matrix as $n$ points in $\mathbb{R}^d$. The score space perspective views a data matrix as $d$ vectors in $\mathbb{R}^n$ (or rather the span of these vectors). One important consequence of this perspective is that it makes sense to relate the data blocks in score space (e.g. as subspaces of $\mathbb{R}^n$) since they share observations.
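To make the constraints concrete, here is a minimal NumPy sketch (illustration only, not the AJIVE estimation procedure) that builds a toy two-block dataset satisfying them and checks the constraints numerically:

```python
import numpy as np

rng = np.random.default_rng(0)
n, d1, d2 = 100, 20, 30
r_joint, r_ind1, r_ind2 = 2, 3, 1

# Orthonormal basis of the common joint score subspace (shared by both blocks)
U_joint, _ = np.linalg.qr(rng.standard_normal((n, r_joint)))

def individual_scores(r_ind):
    # Random scores with the joint subspace projected out, so col-span(I) is orthogonal to col-span(J)
    raw = rng.standard_normal((n, r_ind))
    raw -= U_joint @ (U_joint.T @ raw)
    Q, _ = np.linalg.qr(raw)
    return Q

J1 = U_joint @ rng.standard_normal((r_joint, d1))
J2 = U_joint @ rng.standard_normal((r_joint, d2))
I1 = individual_scores(r_ind1) @ rng.standard_normal((r_ind1, d1))
I2 = individual_scores(r_ind2) @ rng.standard_normal((r_ind2, d2))
E1 = 0.05 * rng.standard_normal((n, d1))
E2 = 0.05 * rng.standard_normal((n, d2))

X1, X2 = J1 + I1 + E1, J2 + I2 + E2   # the JIVE decomposition X = J + I + E

# Constraint checks: common joint rank, block-specific individual ranks, J orthogonal to I
assert np.linalg.matrix_rank(J1) == r_joint and np.linalg.matrix_rank(J2) == r_joint
assert np.linalg.matrix_rank(I1) == r_ind1 and np.linalg.matrix_rank(I2) == r_ind2
assert np.allclose(J1.T @ I1, 0) and np.allclose(J2.T @ I2, 0)
```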
## Quantities of interest
There are a number of potential quantities of interest depending on the application. For example the user may be interested in the full matrices $J^{(k)}$ and/or $I^{(k)}$. By construction these matrices are not full rank and we may also be interested in their singular value decomposition which we define as
\begin{align}
& U^{(k)}_{joint}, D^{(k)}_{joint}, V^{(k)}_{joint} = \text{rank } r_{joint} \text{ SVD of } J^{(k)} \\
& U^{(k)}_{individual}, D^{(k)}_{individual}, V^{(k)}_{individual} = \text{rank } r_{individual}^{(k)} \text{ SVD of } I^{(k)}
\end{align}
One additional quantity of interest is $U_{joint} \in \mathbb{R}^{n \times r_{joint}}$ which is an orthogonal basis of $\text{col-span}(J^{(k)})$. This matrix is produced from an intermediate JIVE computation.
## **PCA analogy**
We give a brief discussion of the PCA/SVD decomposition (assuming the reader is already familiar with it).
#### Basic decomposition
Suppose we have a data matrix $X \in \mathbb{R}^{n \times d}$. Assume that $X$ has been column mean centered and consider the SVD decomposition (this is PCA since we have mean centered the data):
\begin{equation}
X = U D V^T.
\end{equation}
where $U \in \mathbb{R}^{n \times m}$, $D \in \mathbb{R}^{m \times m}$ is diagonal, and $V \in \mathbb{R}^{d \times m}$ with $m = min(n, d)$. Note $U^TU = V^TV = I_{m \times m}$.
Suppose we have decided to use a rank $r$ approximation. We can then decompose $X$ into a signal matrix ($A$) and a noise matrix ($E$)
\begin{equation}
X = A + E,
\end{equation}
where $A$ is the rank $r$ SVD approximation of $X$ i.e.
\begin{align}
A := & U_{:, 1:r} D_{1:r, 1:r} V_{:, 1:r}^T \\
= & \widetilde{U} \widetilde{D} \widetilde{V}^T
\end{align}
The notation $U_{:, 1:r} \in \mathbb{R}^{n \times r}$ means the first $r$ columns of $U$. Similarly we can see the error matrix is $E := U_{:, r+1:m} D_{r+1:m, r+1:m} V_{:, r+1:m}^T$.
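As a quick sanity check, this split can be computed directly with NumPy (a minimal sketch on random, mean-centered data):

```python
import numpy as np

rng = np.random.default_rng(1)
n, d, r = 50, 10, 3
X = rng.standard_normal((n, d))
X -= X.mean(axis=0)                    # column mean centering

U, s, Vt = np.linalg.svd(X, full_matrices=False)
A = U[:, :r] * s[:r] @ Vt[:r]          # rank r signal approximation
E = X - A                              # noise / residual

assert np.allclose(X, A + E)
assert np.linalg.matrix_rank(A) == r
```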
#### Quantities of interest
There are many ways to use a PCA/SVD decomposition. Some common quantities of interest include
- The normalized scores: $\widetilde{U} \in \mathbb{R}^{n \times r}$
- The unnormalized scores: $\widetilde{U}\widetilde{D} \in \mathbb{R}^{n \times r}$
- The loadings: $\widetilde{V} \in \mathbb{R}^{d \times r}$
- The full signal approximation: $A \in \mathbb{R}^{n \times d}$
#### Scores and loadings
For both PCA and JIVE we use the notation $U$ (scores) and $V$ (loadings). These show up in several places.
We refer to all $U \in \mathbb{R}^{n \times r}$ matrices as scores. We can view the $n$ rows of $U$ as representing the $n$ data points with $r$ derived variables (put differently, columns of $U$ are $r$ derived variables). The columns of $U$ are orthonormal: $U^TU = I_{r \times r}$.
Sometimes we may want $UD$, i.e. scale the columns of $U$ by the corresponding diagonal entries of $D$ (the columns are still orthogonal). This can be useful when we want to represent the original data by $r$ variables. We refer to $UD$ as unnormalized scores.
We refer to all $V\in \mathbb{R}^{d \times r}$ matrices as loadings[^2]. The j$th$ column of $V$ gives the linear combination of the original $d$ variables which is equal to the j$th$ unnormalized scores (j$th$ column of $UD$). Equivalently, if we project the $n$ data points (rows of $X$) onto the j$th$ column of $V$ we get the j$th$ unnormalized scores.
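A quick numerical check of that last statement (again just a sketch with random data):

```python
import numpy as np

rng = np.random.default_rng(2)
X = rng.standard_normal((30, 8))
X -= X.mean(axis=0)

U, s, Vt = np.linalg.svd(X, full_matrices=False)
V = Vt.T

# Projecting the rows of X onto the loadings recovers the unnormalized scores U D
assert np.allclose(X @ V, U * s)
```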
The typical geometric perspective of PCA is that the scores represent $r$ new derived variables. For example, if $r = 2$ we can look at a scatter plot that gives a two dimensional approximation of the data. In other words, the rows of the scores matrix are $n$ data points living in $\mathbb{R}^r$.
An alternative geometric perspective is the $r$ columns of the scores matrix are vectors living in $\mathbb{R}^n$. The original $d$ variables span a subspace of $\mathbb{R}^n$ given by $\text{col-span}(X)$. The scores then span a lower dimensional subspace of $\mathbb{R}^n$ that approximates $\text{col-span}(X)$.
The first perspective says PCA finds a lower dimensional approximation to a subspace in $\mathbb{R}^d$ (spanned by the $n$ data points). The second perspective says PCA finds a lower dimensional approximation to a subspace in $\mathbb{R}^n$ (spanned by the $d$ data points).
## **JIVE operating in score space**
For a data matrix $X$ let's call the span of the variables (columns) the *score subspace*, $\text{col-span}(X) \subset \mathbb{R}^n$. Typically we think of a data matrix as $n$ points in $\mathbb{R}^d$. The score space perspective reverses this and says a data matrix is $d$ points in $\mathbb{R}^n$. When thinking in score space it's common to reason about subspaces, i.e. the span of the $d$ variables in $\mathbb{R}^n$. In other words, if two data matrices have the same column span then their score subspaces are the same[^3].
JIVE partitions the score space of each data matrix into three subspaces: joint, individual and noise. The joint score subspace for each data block is the same. The individual score subspace, however, is (possibly) different for each of the $K$ blocks. The k$th$ block's individual score subspace is orthogonal to the joint score subspace. Recall that the $K$ data matrices have the same number of observations ($n$) so it makes sense to think about how the data matrices relate to each other in score space.
PCA partitions the score space into two subspaces: signal and noise (see above). For JIVE we might combine the joint and individual score subspaces and call this the signal score subspace.
_____no_output_____# Footnotes
[^1]: Note this paper calls the algorithm AJIVE (angle based JIVE) however, we simply use JIVE. Additionally, the paper uses columns as observations in data matrices where as we use rows as observations.
[^2]: For PCA we used tildes (e.g. $\widetilde{U}$) to denote the "partial" SVD approximation however for the final JIVE decomposition we do not use tildes. This is intentional since for JIVE the SVD comes from the $I$ and $J$ matrices which are exactly rank $r$. Therefore we view this SVD as the "full" SVD.
[^3]: This might remind the reader of TODO_____no_output_____
| {
"repository": "taebinkim7/py_jive",
"path": "doc/explanation/jive_explanation.ipynb",
"matched_keywords": [
"gene expression"
],
"stars": 13,
"size": 10609,
"hexsha": "d0ba381c794daac3ccd21b8ae510065a8e7a9c46",
"max_line_length": 631,
"avg_line_length": 72.6643835616,
"alphanum_fraction": 0.6448298614
} |
# Notebook from bioexcel/biobb_REST_API_documentation
Path: biobb_REST_API_documentation/html/biobb_REST_API_documentation.web.ipynb
# The BioBB REST API
The **[BioBB REST API](https://mmb.irbbarcelona.org/biobb-api)** allows the execution of the **[BioExcel Building Blocks](https://mmb.irbbarcelona.org/biobb/)** on a remote server.
## Documentation
For extensive documentation, please go to the **[BioBB REST API website help](https://mmb.irbbarcelona.org/biobb-api/rest)**.
## Settings
### Auxiliary libraries used
* [requests](https://pypi.org/project/requests/): Requests allows you to send *organic, grass-fed* HTTP/1.1 requests, without the need for manual labor.
* [nb_conda_kernels](https://github.com/Anaconda-Platform/nb_conda_kernels): Enables a Jupyter Notebook or JupyterLab application in one conda environment to access kernels for Python, R, and other languages found in other environments.
* [nglview](http://nglviewer.org/#nglview): Jupyter/IPython widget to interactively view molecular structures and trajectories in notebooks.
* [ipywidgets](https://github.com/jupyter-widgets/ipywidgets): Interactive HTML widgets for Jupyter notebooks and the IPython kernel.
* [plotly](https://plot.ly/python/offline/): Python interactive graphing library integrated in Jupyter notebooks.
### Conda Installation and Launch
```console
git clone https://github.com/bioexcel/biobb_REST_API_documentation.git
cd biobb_REST_API_documentation
conda env create -f conda_env/environment.yml
conda activate biobb_REST_API_documentation
jupyter-nbextension enable --py --user widgetsnbextension
jupyter-nbextension enable --py --user nglview
jupyter-notebook biobb_REST_API_documentation/notebooks/biobb_REST_API_documentation.ipynb
```
***
## Index
* [Behaviour](#behaviour)
* [Tools information](#tools_info)
* [List of packages](#list_pckg)
* [List of tools](#list_tools)
* [Tool's properties](#tools_prop)
* [Launch tool](#launch_tool)
* [Retrieve status](#retrieve_status)
* [Retrieve data](#retrieve_data)
* [Sample files](#sample_files)
* [All sample files](#all_sample)
* [Package sample files](#pckg_sample)
* [Tool sample files](#tool_sample)
* [Single sample file](#sample)
* [Examples](#examples)
* [Tools information](#tools_info_ex)
* [List of packages](#list_pckg_ex)
* [List of tools from a specific package](#list_tools_ex)
* [Tool's properties](#tools_prop_ex)
* [Launch tool](#launch_tool_ex)
* [Launch job with a YAML file config](#tool_yml_ex)
* [Launch job with a JSON file config](#tool_json_ex)
  * [Launch job with a python dictionary config](#tool_dict_ex)
* [Retrieve status](#retrieve_status_ex)
* [Retrieve data](#retrieve_data_ex)
* [Practical cases](#practical_cases)
* [Example 1: download PDB file from RSCB database](#example1)
* [Example 2: extract heteroatom from a given structure](#example2)
* [Example 3: extract energy components from a given GROMACS energy file](#example3)
***
<img src="https://bioexcel.eu/wp-content/uploads/2019/04/Bioexcell_logo_1080px_transp.png" alt="Bioexcel2 logo"
title="Bioexcel2 logo" width="400" />
***
<a id="behaviour"></a>
## Behaviour
The **BioBB REST API** works as an asynchronous job launcher: since jobs can last from a few seconds to several minutes, there are some steps that must be performed to obtain the complete results of every tool.
**BioExcel Building Blocks** are structured in **[packages and tools](http://mmb.irbbarcelona.org/biobb/availability/source)**. Every call to the **BioBB REST API** executes one single tool and returns the output file(s) related to this specific tool.
<a id="tools_info"></a>
### Tools information
<a id="list_pckg"></a>
#### List of packages
In order to get a complete **list of available packages**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch`
This endpoint returns a **JSON HTTP response** with status `200`. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest/#/List%20of%20Services/getPckgList).
<a id="list_tools"></a>
#### List of tools
If there is need for a **list of tools for a single package**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}`
This endpoint returns a **JSON HTTP response** with status `200` or a `404` status if the package id is incorrect. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest/#/List%20of%20Services/getToolsList).
<a id="tools_prop"></a>
#### Tool's properties
If there is only need for the **information of a single tool**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}/{tool}`
This endpoint returns a **JSON HTTP response** with status `200` or a `404` status if the package id and / or the tool id are incorrect. The reason for failure should be detailed in the JSON response. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest/#/Launch%20Tool/getLaunchTool).
<a id="launch_tool"></a>
### Launch tool
For **launching a tool**, we must do a **POST** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}/{tool}`
In the body of this POST request, **we must add the file(s) needed as input** (including the properties config file in **JSON** or **YAML** format) and the name for the output(s). The detailed list of inputs and outputs with their respective properties can be found in the **GET** request of this same endpoint.
This endpoint returns a **JSON HTTP response** with the following possible status:
* `303`: **The job has been successfully launched** and the user must save the token provided and proceed to the next endpoint (defined in the same JSON response)
* `404`: **There was some error launching the tool.** The reason for failure should be detailed in the JSON response.
* `500`: The job has been launched, but **some internal server error** has occurred during the execution.
More information for a generic call in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest/#/Launch%20Tool/postLaunchTool). The documentation for all the tools is available in the [BioBB REST API Tools Documentation section](https://mmb.irbbarcelona.org/biobb-api/tools-documentation?docExpansion=none). Interactive examples for all the tools are available in the [BioBB REST API Tools Execution section](https://mmb.irbbarcelona.org/biobb-api/tools-execution).
<a id="retrieve_status"></a>
### Retrieve status
If the previous endpoint returned a `303` status, we must do a **GET** request to the following endpoint providing the given token in the path:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/retrieve/status/{token}`
This endpoint checks the state of the job and returns a **JSON HTTP response** with the following possible status:
* `200`: **The job has finished successfully** and in the JSON response we can find a list of output files generated by the job, with their corresponding ids for retrieving them from the next endpoint (defined in the same JSON message).
* `202`: The job is **still running**.
* `404`: **Token incorrect, job non-existent or expired.**
* `500`: Some **internal server error** has occurred during the execution.
More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest/#/Retrieve/getRetrieveStatus).
<a id="retrieve_data"></a>
### Retrieve data
Once the previous endpoint returns a `200` status, the output file(s) are ready for retrieval, so we must do a **GET** request to the following endpoint providing the given **file id** in the path:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/retrieve/data/{id}`
This endpoint returns the **requested file** with a `200` status or a `404` status if the provided id is incorrect, the file doesn't exist or it has expired. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest/#/Retrieve/getRetrieveData).
Note that if we have executed a job that returns multiple output files, a call to this endpoint must be done **for each of the output files** generated by the job.
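Putting the launch, status and data endpoints together, a typical client loop looks roughly like the sketch below. It only illustrates the flow described above; the JSON field names (e.g. `token`) are assumptions here, the actual keys are those returned by the server.

```python
import time
import requests

apiURL = "https://mmb.irbbarcelona.org/biobb-api/rest/v1/"

def launch_and_wait(package, tool, files, data=None, poll_seconds=5):
    # POST the input file(s) and config, then poll the status endpoint until the job ends
    r = requests.post(apiURL + "launch/" + package + "/" + tool, data=data or {}, files=files)
    token = r.json()["token"]            # field name assumed; a successful launch (303) returns the token
    while True:
        s = requests.get(apiURL + "retrieve/status/" + token)
        if s.status_code == 200:         # finished: the JSON lists the generated output files and their ids
            return s.json()
        if s.status_code != 202:         # anything other than "still running" means failure or expiry
            raise RuntimeError(s.text)
        time.sleep(poll_seconds)

# Each output file id can then be downloaded from retrieve/data/{id} and written to disk.
```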
<a id="sample_files"></a>
### Sample files
The **BioBB REST API** provides sample files for most of the inputs and outputs of each tool. Files can be accessed through the whole **BioBB REST API** hierarchy.
<a id="all_sample"></a>
#### All sample files
In order to download **all the sample files**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/sample`
This endpoint returns the **requested file** with a `200` status. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Sample%20Files/getSample).
<a id="pckg_sample"></a>
#### Package sample files
In order to download **all the sample files of a package**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/sample/{package}`
This endpoint returns the **requested file** with a `200` status or a `404` status if the package id is incorrect. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Sample%20Files/getPackageSample).
<a id="tool_sample"></a>
#### Tool sample files
In order to download **all the sample files of a tool**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/sample/{package}/{tool}`
This endpoint returns the **requested file** with a `200` status or a `404` status if the package id and / or the tool id are incorrect. The reason for failure should be detailed in the JSON response. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Sample%20Files/getToolSample).
<a id="sample"></a>
#### Single sample file
In order to download **a single sample file**, we must do a **GET** request to the following endpoint:
`https://mmb.irbbarcelona.org/biobb-api/rest/v1/sample/{package}/{tool}/{id}`
This endpoint returns the **requested file** with a `200` status or a `404` status if the package id and / or the tool id and / or the file id are incorrect. The reason for failure should be detailed in the JSON response. More information in the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Sample%20Files/getSingleSample)._____no_output_____<a id="examples"></a>
## Examples
Below we will do **calls to all the previously defined endpoints** and define some **functions** to make the connection to the **BioBB REST API** through **Jupyter Notebook** easier.
First off, we will import the Python requests and json libraries and set the root URI for the **BioBB REST API**._____no_output_____
<code>
import requests
import json
apiURL = "https://mmb.irbbarcelona.org/biobb-api/rest/v1/"_____no_output_____
</code>
<a id="tools_info_ex"></a>
### Tools information_____no_output_____Definition of simple GET / POST request functions and a class Response:_____no_output_____
<code>
# Class for returning response status and json content of a requested URL
class Response:
def __init__(self, status, json):
self.status = status
self.json = json
# Perform GET request
def get_data(url):
r = requests.get(url)
return Response(r.status_code, json.loads(r.text))
# Perform POST request
def post_data(url, d, f):
r = requests.post(url, data = d, files = f)
return Response(r.status_code, json.loads(r.text))_____no_output_____
</code>
<a id="list_pckg_ex"></a>
#### List of packages
For more information about this endpoint, please visit the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/List%20of%20Services/getPckgList).
##### Endpoint_____no_output_____**GET** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch`_____no_output_____##### Code_____no_output_____
<code>
url = apiURL + 'launch'
response = get_data(url)
print(json.dumps(response.json, indent=2)){
"packages": [
{
"id": "biobb_analysis",
"tools": [
{
"id": "gmx_cluster",
"description": "Creates cluster structures from a given GROMACS compatible trajectory",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_cluster tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_cluster.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Path to the output cluster file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_cluster.pdb",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "gmx_rms",
"description": "Performs a Root Mean Square deviation (RMSd) analysis from a given GROMACS compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_rms tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_rms.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_xvg_path",
"required": true,
"description": "Path to the XVG output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_rms.xvg",
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "gmx_rgyr",
"description": "Computes the radius of gyration (Rgyr) of a molecule about the x-, y- and z-axes, as a function of time, from a given GROMACS compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_rgyr tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_rgyr.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_xvg_path",
"required": true,
"description": "Path to the XVG output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_rgyr.xvg",
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "gmx_energy",
"description": "Extracts energy components from a given GROMACS energy file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_energy tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_energy.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_energy_path",
"required": true,
"description": "Path to the input EDR file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/energy.edr",
"formats": [
".*\\.edr$"
]
},
{
"id": "output_xvg_path",
"required": true,
"description": "Path to the XVG output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_energy.xvg",
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "gmx_image",
"description": "Corrects periodicity (image) from a given GROMACS compatible trajectory file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_image tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_image.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the GROMACS input topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_traj_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_image.xtc",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "gmx_trjconv_str",
"description": "Converts between GROMACS compatible structure file formats and/or extracts a selection of atoms.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_trjconv_str tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_trjconv_str.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the GROMACS input topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_str_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_trjconv.str.pdb",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "gmx_trjconv_str_ens",
"description": "Extracts an ensemble of frames containing a selection of atoms from GROMACS compatible trajectory files.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_trjconv_str_ens tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_trjconv_str_ens.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the GROMACS input topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_str_ens_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_trjconv.str.ens.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "gmx_trjconv_trj",
"description": "Converts between GROMACS compatible trajectory file formats and/or extracts a selection of atoms.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_trjconv_trj tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_trjconv_trj.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_traj_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_trjconv.trj.xtc",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "cpptraj_average",
"description": "Calculates a structure average of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_average tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_average.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed structure",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.average.pdb",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_bfactor",
"description": "Calculates the Bfactor fluctuations of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_bfactor tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "input_exp_path",
"required": false,
"description": "Path to the experimental reference file (required if reference = experimental)",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/experimental.1e5t.pdb",
"formats": null
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.bfactor.first.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_rms",
"description": "Calculates the Root Mean Square deviation (RMSd) of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_rms tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "input_exp_path",
"required": false,
"description": "Path to the experimental reference file (required if reference = experimental)",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/experimental.1e5t.pdb",
"formats": null
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.rms.first.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_rmsf",
"description": "Calculates the Root Mean Square fluctuations (RMSf) of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_rmsf tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "input_exp_path",
"required": false,
"description": "Path to the experimental reference file (required if reference = experimental)",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/experimental.1e5t.pdb",
"formats": null
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.rmsf.first.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_rgyr",
"description": "Computes the radius of gyration (Rgyr) from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_rgyr tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_rgyr.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.rgyr.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_dry",
"description": "Dehydrates a given cpptraj compatible trajectory stripping out solvent molecules and ions.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_dry tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_dry.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.dry.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_strip",
"description": "Strips a defined set of atoms (mask) from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_strip tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_strip.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.strip.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_snapshot",
"description": "Extracts a particular snapshot from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_snapshot tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_snapshot.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed structure",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.snapshot.pdb",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_slice",
"description": "Extracts a particular trajectory slice from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_slice tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_slice.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.slice.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_convert",
"description": "Converts between cpptraj compatible trajectory file formats and/or extracts a selection of atoms or frames.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_convert tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_convert.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.convert.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_mask",
"description": "Extracts a selection of atoms from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_mask tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_mask.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.mask.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_image",
"description": "Corrects periodicity (image) from a given cpptraj trajectory file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_image tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_image.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.image.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
}
]
},
{
"id": "biobb_chemistry",
"tools": [
{
"id": "acpype_params_ac",
"description": "Small molecule parameterization for AMBER MD package.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the acpype_params_ac tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_acpype_params_ac.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/acpype/acpype.params.mol2",
"formats": [
".*\\.pdb$",
".*\\.mdl$",
".*\\.mol2$"
]
},
{
"id": "output_path_frcmod",
"required": true,
"description": "Path to the FRCMOD output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.ac.frcmod",
"formats": [
".*\\.frcmod$"
]
},
{
"id": "output_path_inpcrd",
"required": true,
"description": "Path to the INPCRD output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.ac.inpcrd",
"formats": [
".*\\.inpcrd$"
]
},
{
"id": "output_path_lib",
"required": true,
"description": "Path to the LIB output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.ac.lib",
"formats": [
".*\\.lib$"
]
},
{
"id": "output_path_prmtop",
"required": true,
"description": "Path to the PRMTOP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.ac.prmtop",
"formats": [
".*\\.prmtop$"
]
}
]
},
{
"id": "acpype_params_cns",
"description": "Small molecule parameterization for CNS/XPLOR MD package.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the acpype_params_cns tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_acpype_params_cns.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/acpype/acpype.params.mol2",
"formats": [
".*\\.pdb$",
".*\\.mdl$",
".*\\.mol2$"
]
},
{
"id": "output_path_par",
"required": true,
"description": "Path to the PAR output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.cns.par",
"formats": [
".*\\.par$"
]
},
{
"id": "output_path_inp",
"required": true,
"description": "Path to the INP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.cns.inp",
"formats": [
".*\\.inp$"
]
},
{
"id": "output_path_top",
"required": true,
"description": "Path to the TOP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.cns.top",
"formats": [
".*\\.top$"
]
}
]
},
{
"id": "acpype_params_gmx",
"description": "Small molecule parameterization for GROMACS MD package.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the acpype_params_gmx tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_acpype_params_gmx.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/acpype/acpype.params.mol2",
"formats": [
".*\\.pdb$",
".*\\.mdl$",
".*\\.mol2$"
]
},
{
"id": "output_path_gro",
"required": true,
"description": "Path to the GRO output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.gmx.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "output_path_itp",
"required": true,
"description": "Path to the ITP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.gmx.itp",
"formats": [
".*\\.itp$"
]
},
{
"id": "output_path_top",
"required": true,
"description": "Path to the TOP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.gmx.top",
"formats": [
".*\\.top$"
]
}
]
},
{
"id": "acpype_params_gmx_opls",
"description": "Small molecule parameterization for OPLS/AA MD package.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the acpype_params_gmx_opls tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_acpype_params_gmx_opls.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/acpype/acpype.params.mol2",
"formats": [
".*\\.pdb$",
".*\\.mdl$",
".*\\.mol2$"
]
},
{
"id": "output_path_itp",
"required": true,
"description": "Path to the ITP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.gmx.opls.itp",
"formats": [
".*\\.itp$"
]
},
{
"id": "output_path_top",
"required": true,
"description": "Path to the TOP output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/acpype/ref_acpype.gmx.opls.top",
"formats": [
".*\\.top$"
]
}
]
},
{
"id": "babel_convert",
"description": "Small molecule format conversion.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the babel_convert tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_babel_convert.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/babel/babel.smi",
"formats": [
".*\\.abinit$",
".*\\.acesout$",
".*\\.acr$",
".*\\.adfout$",
".*\\.alc$",
".*\\.aoforce$",
".*\\.arc$",
".*\\.axsf$",
".*\\.bgf$",
".*\\.box$",
".*\\.bs$",
".*\\.c09out$",
".*\\.c3d2$",
".*\\.caccrt$",
".*\\.can$",
".*\\.car$",
".*\\.castep$",
".*\\.ccc$",
".*\\.cdjson$",
".*\\.cdx$",
".*\\.cdxml$",
".*\\.cif$",
".*\\.ck$",
".*\\.cml$",
".*\\.cmlr$",
".*\\.CONFIG$",
".*\\.CONTCAR$",
".*\\.CONTFF$",
".*\\.crk2d$",
".*\\.crk3d$",
".*\\.ct$",
".*\\.cub$",
".*\\.cube$",
".*\\.dallog$",
".*\\.dalmol$",
".*\\.dat$",
".*\\.dmol$",
".*\\.dx$",
".*\\.ent$",
".*\\.exyz$",
".*\\.fa$",
".*\\.fasta$",
".*\\.fch$",
".*\\.fchk$",
".*\\.fck$",
".*\\.feat$",
".*\\.fhiaims$",
".*\\.fract$",
".*\\.fs$",
".*\\.fsa$",
".*\\.g03$",
".*\\.g09$",
".*\\.g92$",
".*\\.g94$",
".*\\.g98$",
".*\\.gal$",
".*\\.gam$",
".*\\.gamess$",
".*\\.gamin$",
".*\\.gamout$",
".*\\.got$",
".*\\.gpr$",
".*\\.gro$",
".*\\.gukin$",
".*\\.gukout$",
".*\\.gzmat$",
".*\\.hin$",
".*\\.HISTORY$",
".*\\.inchi$",
".*\\.inp$",
".*\\.ins$",
".*\\.jin$",
".*\\.jout$",
".*\\.log$",
".*\\.lpmd$",
".*\\.mcdl$",
".*\\.mcif$",
".*\\.MDFF$",
".*\\.mdl$",
".*\\.ml2$",
".*\\.mmcif$",
".*\\.mmd$",
".*\\.mmod$",
".*\\.mol$",
".*\\.mol2$",
".*\\.mold$",
".*\\.molden$",
".*\\.molf$",
".*\\.moo$",
".*\\.mop$",
".*\\.mopcrt$",
".*\\.mopin$",
".*\\.mopout$",
".*\\.mpc$",
".*\\.mpo$",
".*\\.mpqc$",
".*\\.mrv$",
".*\\.msi$",
".*\\.nwo$",
".*\\.orca$",
".*\\.out$",
".*\\.outmol$",
".*\\.output$",
".*\\.pc$",
".*\\.pcjson$",
".*\\.pcm$",
".*\\.pdb$",
".*\\.pdbqt$",
".*\\.png$",
".*\\.pos$",
".*\\.POSCAR$",
".*\\.POSFF$",
".*\\.pqr$",
".*\\.pqs$",
".*\\.prep$",
".*\\.pwscf$",
".*\\.qcout$",
".*\\.res$",
".*\\.rsmi$",
".*\\.rxn$",
".*\\.sd$",
".*\\.sdf$",
".*\\.siesta$",
".*\\.smi$",
".*\\.smiles$",
".*\\.smy$",
".*\\.sy2$",
".*\\.t41$",
".*\\.tdd$",
".*\\.text$",
".*\\.therm$",
".*\\.tmol$",
".*\\.txt$",
".*\\.txyz$",
".*\\.unixyz$",
".*\\.VASP$",
".*\\.vmol$",
".*\\.xml$",
".*\\.xsf$",
".*\\.xtc$",
".*\\.xyz$",
".*\\.yob$"
]
},
{
"id": "output_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/babel/ref_babel.convert.mol2",
"formats": [
".*\\.acesin$",
".*\\.adf$",
".*\\.alc$",
".*\\.ascii$",
".*\\.bgf$",
".*\\.box$",
".*\\.bs$",
".*\\.c3d1$",
".*\\.c3d2$",
".*\\.cac$",
".*\\.caccrt$",
".*\\.cache$",
".*\\.cacint$",
".*\\.can$",
".*\\.cdjson$",
".*\\.cdxml$",
".*\\.cht$",
".*\\.cif$",
".*\\.ck$",
".*\\.cml$",
".*\\.cmlr$",
".*\\.com$",
".*\\.confabreport$",
".*\\.CONFIG$",
".*\\.CONTCAR$",
".*\\.CONTFF$",
".*\\.copy$",
".*\\.crk2d$",
".*\\.crk3d$",
".*\\.csr$",
".*\\.cssr$",
".*\\.ct$",
".*\\.cub$",
".*\\.cube$",
".*\\.dalmol$",
".*\\.dmol$",
".*\\.dx$",
".*\\.ent$",
".*\\.exyz$",
".*\\.fa$",
".*\\.fasta$",
".*\\.feat$",
".*\\.fh$",
".*\\.fhiaims$",
".*\\.fix$",
".*\\.fps$",
".*\\.fpt$",
".*\\.fract$",
".*\\.fs$",
".*\\.fsa$",
".*\\.gamin$",
".*\\.gau$",
".*\\.gjc$",
".*\\.gjf$",
".*\\.gpr$",
".*\\.gr96$",
".*\\.gro$",
".*\\.gukin$",
".*\\.gukout$",
".*\\.gzmat$",
".*\\.hin$",
".*\\.inchi$",
".*\\.inchikey$",
".*\\.inp$",
".*\\.jin$",
".*\\.k$",
".*\\.lmpdat$",
".*\\.lpmd$",
".*\\.mcdl$",
".*\\.mcif$",
".*\\.MDFF$",
".*\\.mdl$",
".*\\.ml2$",
".*\\.mmcif$",
".*\\.mmd$",
".*\\.mmod$",
".*\\.mna$",
".*\\.mol$",
".*\\.mol2$",
".*\\.mold$",
".*\\.molden$",
".*\\.molf$",
".*\\.molreport$",
".*\\.mop$",
".*\\.mopcrt$",
".*\\.mopin$",
".*\\.mp$",
".*\\.mpc$",
".*\\.mpd$",
".*\\.mpqcin$",
".*\\.mrv$",
".*\\.msms$",
".*\\.nul$",
".*\\.nw$",
".*\\.orcainp$",
".*\\.outmol$",
".*\\.paint$",
".*\\.pcjson$",
".*\\.pcm$",
".*\\.pdb$",
".*\\.pdbqt$",
".*\\.png$",
".*\\.pointcloud$",
".*\\.POSCAR$",
".*\\.POSFF$",
".*\\.pov$",
".*\\.pqr$",
".*\\.pqs$",
".*\\.qcin$",
".*\\.report$",
".*\\.rsmi$",
".*\\.rxn$",
".*\\.sd$",
".*\\.sdf$",
".*\\.smi$",
".*\\.smiles$",
".*\\.stl$",
".*\\.svg$",
".*\\.sy2$",
".*\\.tdd$",
".*\\.text$",
".*\\.therm$",
".*\\.tmol$",
".*\\.txt$",
".*\\.txyz$",
".*\\.unixyz$",
".*\\.VASP$",
".*\\.vmol$",
".*\\.xed$",
".*\\.xyz$",
".*\\.yob$",
".*\\.zin$"
]
}
]
},
{
"id": "babel_add_hydrogens",
"description": "Adds hydrogen atoms to small molecules.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the babel_add_hydrogens tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_babel_add_hydrogens.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/babel/babel.no.H.pdb",
"formats": [
".*\\.abinit$",
".*\\.acesout$",
".*\\.acr$",
".*\\.adfout$",
".*\\.alc$",
".*\\.aoforce$",
".*\\.arc$",
".*\\.axsf$",
".*\\.bgf$",
".*\\.box$",
".*\\.bs$",
".*\\.c09out$",
".*\\.c3d2$",
".*\\.caccrt$",
".*\\.can$",
".*\\.car$",
".*\\.castep$",
".*\\.ccc$",
".*\\.cdjson$",
".*\\.cdx$",
".*\\.cdxml$",
".*\\.cif$",
".*\\.ck$",
".*\\.cml$",
".*\\.cmlr$",
".*\\.CONFIG$",
".*\\.CONTCAR$",
".*\\.CONTFF$",
".*\\.crk2d$",
".*\\.crk3d$",
".*\\.ct$",
".*\\.cub$",
".*\\.cube$",
".*\\.dallog$",
".*\\.dalmol$",
".*\\.dat$",
".*\\.dmol$",
".*\\.dx$",
".*\\.ent$",
".*\\.exyz$",
".*\\.fa$",
".*\\.fasta$",
".*\\.fch$",
".*\\.fchk$",
".*\\.fck$",
".*\\.feat$",
".*\\.fhiaims$",
".*\\.fract$",
".*\\.fs$",
".*\\.fsa$",
".*\\.g03$",
".*\\.g09$",
".*\\.g92$",
".*\\.g94$",
".*\\.g98$",
".*\\.gal$",
".*\\.gam$",
".*\\.gamess$",
".*\\.gamin$",
".*\\.gamout$",
".*\\.got$",
".*\\.gpr$",
".*\\.gro$",
".*\\.gukin$",
".*\\.gukout$",
".*\\.gzmat$",
".*\\.hin$",
".*\\.HISTORY$",
".*\\.inchi$",
".*\\.inp$",
".*\\.ins$",
".*\\.jin$",
".*\\.jout$",
".*\\.log$",
".*\\.lpmd$",
".*\\.mcdl$",
".*\\.mcif$",
".*\\.MDFF$",
".*\\.mdl$",
".*\\.ml2$",
".*\\.mmcif$",
".*\\.mmd$",
".*\\.mmod$",
".*\\.mol$",
".*\\.mol2$",
".*\\.mold$",
".*\\.molden$",
".*\\.molf$",
".*\\.moo$",
".*\\.mop$",
".*\\.mopcrt$",
".*\\.mopin$",
".*\\.mopout$",
".*\\.mpc$",
".*\\.mpo$",
".*\\.mpqc$",
".*\\.mrv$",
".*\\.msi$",
".*\\.nwo$",
".*\\.orca$",
".*\\.out$",
".*\\.outmol$",
".*\\.output$",
".*\\.pc$",
".*\\.pcjson$",
".*\\.pcm$",
".*\\.pdb$",
".*\\.pdbqt$",
".*\\.png$",
".*\\.pos$",
".*\\.POSCAR$",
".*\\.POSFF$",
".*\\.pqr$",
".*\\.pqs$",
".*\\.prep$",
".*\\.pwscf$",
".*\\.qcout$",
".*\\.res$",
".*\\.rsmi$",
".*\\.rxn$",
".*\\.sd$",
".*\\.sdf$",
".*\\.siesta$",
".*\\.smi$",
".*\\.smiles$",
".*\\.smy$",
".*\\.sy2$",
".*\\.t41$",
".*\\.tdd$",
".*\\.text$",
".*\\.therm$",
".*\\.tmol$",
".*\\.txt$",
".*\\.txyz$",
".*\\.unixyz$",
".*\\.VASP$",
".*\\.vmol$",
".*\\.xml$",
".*\\.xsf$",
".*\\.xtc$",
".*\\.xyz$",
".*\\.yob$"
]
},
{
"id": "output_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/babel/ref_babel.hydrogens.pdb",
"formats": [
".*\\.acesin$",
".*\\.adf$",
".*\\.alc$",
".*\\.ascii$",
".*\\.bgf$",
".*\\.box$",
".*\\.bs$",
".*\\.c3d1$",
".*\\.c3d2$",
".*\\.cac$",
".*\\.caccrt$",
".*\\.cache$",
".*\\.cacint$",
".*\\.can$",
".*\\.cdjson$",
".*\\.cdxml$",
".*\\.cht$",
".*\\.cif$",
".*\\.ck$",
".*\\.cml$",
".*\\.cmlr$",
".*\\.com$",
".*\\.confabreport$",
".*\\.CONFIG$",
".*\\.CONTCAR$",
".*\\.CONTFF$",
".*\\.copy$",
".*\\.crk2d$",
".*\\.crk3d$",
".*\\.csr$",
".*\\.cssr$",
".*\\.ct$",
".*\\.cub$",
".*\\.cube$",
".*\\.dalmol$",
".*\\.dmol$",
".*\\.dx$",
".*\\.ent$",
".*\\.exyz$",
".*\\.fa$",
".*\\.fasta$",
".*\\.feat$",
".*\\.fh$",
".*\\.fhiaims$",
".*\\.fix$",
".*\\.fps$",
".*\\.fpt$",
".*\\.fract$",
".*\\.fs$",
".*\\.fsa$",
".*\\.gamin$",
".*\\.gau$",
".*\\.gjc$",
".*\\.gjf$",
".*\\.gpr$",
".*\\.gr96$",
".*\\.gro$",
".*\\.gukin$",
".*\\.gukout$",
".*\\.gzmat$",
".*\\.hin$",
".*\\.inchi$",
".*\\.inchikey$",
".*\\.inp$",
".*\\.jin$",
".*\\.k$",
".*\\.lmpdat$",
".*\\.lpmd$",
".*\\.mcdl$",
".*\\.mcif$",
".*\\.MDFF$",
".*\\.mdl$",
".*\\.ml2$",
".*\\.mmcif$",
".*\\.mmd$",
".*\\.mmod$",
".*\\.mna$",
".*\\.mol$",
".*\\.mol2$",
".*\\.mold$",
".*\\.molden$",
".*\\.molf$",
".*\\.molreport$",
".*\\.mop$",
".*\\.mopcrt$",
".*\\.mopin$",
".*\\.mp$",
".*\\.mpc$",
".*\\.mpd$",
".*\\.mpqcin$",
".*\\.mrv$",
".*\\.msms$",
".*\\.nul$",
".*\\.nw$",
".*\\.orcainp$",
".*\\.outmol$",
".*\\.paint$",
".*\\.pcjson$",
".*\\.pcm$",
".*\\.pdb$",
".*\\.pdbqt$",
".*\\.png$",
".*\\.pointcloud$",
".*\\.POSCAR$",
".*\\.POSFF$",
".*\\.pov$",
".*\\.pqr$",
".*\\.pqs$",
".*\\.qcin$",
".*\\.report$",
".*\\.rsmi$",
".*\\.rxn$",
".*\\.sd$",
".*\\.sdf$",
".*\\.smi$",
".*\\.smiles$",
".*\\.stl$",
".*\\.svg$",
".*\\.sy2$",
".*\\.tdd$",
".*\\.text$",
".*\\.therm$",
".*\\.tmol$",
".*\\.txt$",
".*\\.txyz$",
".*\\.unixyz$",
".*\\.VASP$",
".*\\.vmol$",
".*\\.xed$",
".*\\.xyz$",
".*\\.yob$",
".*\\.zin$"
]
}
]
},
{
"id": "babel_remove_hydrogens",
"description": "Removes hydrogen atoms to small molecules.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the babel_remove_hydrogens tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_babel_remove_hydrogens.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/babel/babel.H.pdb",
"formats": [
".*\\.abinit$",
".*\\.acesout$",
".*\\.acr$",
".*\\.adfout$",
".*\\.alc$",
".*\\.aoforce$",
".*\\.arc$",
".*\\.axsf$",
".*\\.bgf$",
".*\\.box$",
".*\\.bs$",
".*\\.c09out$",
".*\\.c3d2$",
".*\\.caccrt$",
".*\\.can$",
".*\\.car$",
".*\\.castep$",
".*\\.ccc$",
".*\\.cdjson$",
".*\\.cdx$",
".*\\.cdxml$",
".*\\.cif$",
".*\\.ck$",
".*\\.cml$",
".*\\.cmlr$",
".*\\.CONFIG$",
".*\\.CONTCAR$",
".*\\.CONTFF$",
".*\\.crk2d$",
".*\\.crk3d$",
".*\\.ct$",
".*\\.cub$",
".*\\.cube$",
".*\\.dallog$",
".*\\.dalmol$",
".*\\.dat$",
".*\\.dmol$",
".*\\.dx$",
".*\\.ent$",
".*\\.exyz$",
".*\\.fa$",
".*\\.fasta$",
".*\\.fch$",
".*\\.fchk$",
".*\\.fck$",
".*\\.feat$",
".*\\.fhiaims$",
".*\\.fract$",
".*\\.fs$",
".*\\.fsa$",
".*\\.g03$",
".*\\.g09$",
".*\\.g92$",
".*\\.g94$",
".*\\.g98$",
".*\\.gal$",
".*\\.gam$",
".*\\.gamess$",
".*\\.gamin$",
".*\\.gamout$",
".*\\.got$",
".*\\.gpr$",
".*\\.gro$",
".*\\.gukin$",
".*\\.gukout$",
".*\\.gzmat$",
".*\\.hin$",
".*\\.HISTORY$",
".*\\.inchi$",
".*\\.inp$",
".*\\.ins$",
".*\\.jin$",
".*\\.jout$",
".*\\.log$",
".*\\.lpmd$",
".*\\.mcdl$",
".*\\.mcif$",
".*\\.MDFF$",
".*\\.mdl$",
".*\\.ml2$",
".*\\.mmcif$",
".*\\.mmd$",
".*\\.mmod$",
".*\\.mol$",
".*\\.mol2$",
".*\\.mold$",
".*\\.molden$",
".*\\.molf$",
".*\\.moo$",
".*\\.mop$",
".*\\.mopcrt$",
".*\\.mopin$",
".*\\.mopout$",
".*\\.mpc$",
".*\\.mpo$",
".*\\.mpqc$",
".*\\.mrv$",
".*\\.msi$",
".*\\.nwo$",
".*\\.orca$",
".*\\.out$",
".*\\.outmol$",
".*\\.output$",
".*\\.pc$",
".*\\.pcjson$",
".*\\.pcm$",
".*\\.pdb$",
".*\\.pdbqt$",
".*\\.png$",
".*\\.pos$",
".*\\.POSCAR$",
".*\\.POSFF$",
".*\\.pqr$",
".*\\.pqs$",
".*\\.prep$",
".*\\.pwscf$",
".*\\.qcout$",
".*\\.res$",
".*\\.rsmi$",
".*\\.rxn$",
".*\\.sd$",
".*\\.sdf$",
".*\\.siesta$",
".*\\.smi$",
".*\\.smiles$",
".*\\.smy$",
".*\\.sy2$",
".*\\.t41$",
".*\\.tdd$",
".*\\.text$",
".*\\.therm$",
".*\\.tmol$",
".*\\.txt$",
".*\\.txyz$",
".*\\.unixyz$",
".*\\.VASP$",
".*\\.vmol$",
".*\\.xml$",
".*\\.xsf$",
".*\\.xtc$",
".*\\.xyz$",
".*\\.yob$"
]
},
{
"id": "output_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/babel/ref_babel.nohydrogens.pdb",
"formats": [
".*\\.acesin$",
".*\\.adf$",
".*\\.alc$",
".*\\.ascii$",
".*\\.bgf$",
".*\\.box$",
".*\\.bs$",
".*\\.c3d1$",
".*\\.c3d2$",
".*\\.cac$",
".*\\.caccrt$",
".*\\.cache$",
".*\\.cacint$",
".*\\.can$",
".*\\.cdjson$",
".*\\.cdxml$",
".*\\.cht$",
".*\\.cif$",
".*\\.ck$",
".*\\.cml$",
".*\\.cmlr$",
".*\\.com$",
".*\\.confabreport$",
".*\\.CONFIG$",
".*\\.CONTCAR$",
".*\\.CONTFF$",
".*\\.copy$",
".*\\.crk2d$",
".*\\.crk3d$",
".*\\.csr$",
".*\\.cssr$",
".*\\.ct$",
".*\\.cub$",
".*\\.cube$",
".*\\.dalmol$",
".*\\.dmol$",
".*\\.dx$",
".*\\.ent$",
".*\\.exyz$",
".*\\.fa$",
".*\\.fasta$",
".*\\.feat$",
".*\\.fh$",
".*\\.fhiaims$",
".*\\.fix$",
".*\\.fps$",
".*\\.fpt$",
".*\\.fract$",
".*\\.fs$",
".*\\.fsa$",
".*\\.gamin$",
".*\\.gau$",
".*\\.gjc$",
".*\\.gjf$",
".*\\.gpr$",
".*\\.gr96$",
".*\\.gro$",
".*\\.gukin$",
".*\\.gukout$",
".*\\.gzmat$",
".*\\.hin$",
".*\\.inchi$",
".*\\.inchikey$",
".*\\.inp$",
".*\\.jin$",
".*\\.k$",
".*\\.lmpdat$",
".*\\.lpmd$",
".*\\.mcdl$",
".*\\.mcif$",
".*\\.MDFF$",
".*\\.mdl$",
".*\\.ml2$",
".*\\.mmcif$",
".*\\.mmd$",
".*\\.mmod$",
".*\\.mna$",
".*\\.mol$",
".*\\.mol2$",
".*\\.mold$",
".*\\.molden$",
".*\\.molf$",
".*\\.molreport$",
".*\\.mop$",
".*\\.mopcrt$",
".*\\.mopin$",
".*\\.mp$",
".*\\.mpc$",
".*\\.mpd$",
".*\\.mpqcin$",
".*\\.mrv$",
".*\\.msms$",
".*\\.nul$",
".*\\.nw$",
".*\\.orcainp$",
".*\\.outmol$",
".*\\.paint$",
".*\\.pcjson$",
".*\\.pcm$",
".*\\.pdb$",
".*\\.pdbqt$",
".*\\.png$",
".*\\.pointcloud$",
".*\\.POSCAR$",
".*\\.POSFF$",
".*\\.pov$",
".*\\.pqr$",
".*\\.pqs$",
".*\\.qcin$",
".*\\.report$",
".*\\.rsmi$",
".*\\.rxn$",
".*\\.sd$",
".*\\.sdf$",
".*\\.smi$",
".*\\.smiles$",
".*\\.stl$",
".*\\.svg$",
".*\\.sy2$",
".*\\.tdd$",
".*\\.text$",
".*\\.therm$",
".*\\.tmol$",
".*\\.txt$",
".*\\.txyz$",
".*\\.unixyz$",
".*\\.VASP$",
".*\\.vmol$",
".*\\.xed$",
".*\\.xyz$",
".*\\.yob$",
".*\\.zin$"
]
}
]
},
{
"id": "babel_minimize",
"description": "Energetically minimize small molecules.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the babel_minimize tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_babel_minimize.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/babel/babel.minimize.pdb",
"formats": [
".*\\.pdb$",
".*\\.mol2$"
]
},
{
"id": "output_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/babel/ref_babel.minimize.pdb",
"formats": [
".*\\.pdb$",
".*\\.mol2$"
]
}
]
},
{
"id": "reduce_add_hydrogens",
"description": "Adds hydrogen atoms to small molecules.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the reduce_add_hydrogens tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_chemistry/master/biobb_chemistry/test/data/config/config_reduce_add_hydrogens.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/ambertools/reduce.no.H.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/ambertools/ref_reduce.add.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "reduce_remove_hydrogens",
"description": "Removes hydrogen atoms to small molecules.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the reduce_remove_hydrogens tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_path",
"required": true,
"description": "Path to the input file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/data/ambertools/reduce.H.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_chemistry/raw/master/biobb_chemistry/test/reference/ambertools/ref_reduce.remove.pdb",
"formats": [
".*\\.pdb$"
]
}
]
}
]
},
{
"id": "biobb_io",
"tools": [
{
"id": "ligand",
"description": "Downloads a ligand file from the MMB REST API.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the ligand tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_io/master/biobb_io/test/data/config/config_ligand.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Path to the output PDB ligand file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_io/raw/master/biobb_io/test/reference/api/ligand_12d.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "pdb",
"description": "Downloads a PDB file from the RCSB or MMB REST APIs.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the pdb tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Path to the output PDB file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_io/raw/master/biobb_io/test/reference/api/pdb_1ubq.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "pdb_variants",
"description": "Creates a text file containing a list of all the variants mapped to a RSCB PDB code from the corresponding UNIPROT entries.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the pdb_variants tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "output_mutations_list_txt",
"required": true,
"description": "Path to the TXT file containing an ASCII comma separated values of the mutations",
"filetype": "output",
"sample": null,
"formats": [
".*\\.txt$"
]
}
]
},
{
"id": "pdb_cluster_zip",
"description": "Creates a zip file containing all the PDB files in the given sequence similarity cluster percentage of the given RSCB PDB code.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the pdb_cluster_zip tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "output_pdb_zip_path",
"required": true,
"description": "Path to the ZIP or PDB file containing the output PDB files",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_io/raw/master/biobb_io/test/reference/api/reference_output_pdb_zip_path.zip",
"formats": [
".*\\.pdb$",
".*\\.zip$"
]
}
]
}
]
},
{
"id": "biobb_md",
"tools": [
{
"id": "pdb2gmx",
"description": "Creates a compressed (ZIP) GROMACS topology (TOP and ITP files) from a given PDB file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the pdb2gmx tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_md/master/biobb_md/test/data/config/config_pdb2gmx.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_pdb_path",
"required": true,
"description": "Path to the input PDB file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/egfr.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_gro_path",
"required": true,
"description": "Path to the output GRO file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_pdb2gmx.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "output_top_zip_path",
"required": true,
"description": "Path the output TOP topology in zip format",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_pdb2gmx.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "editconf",
"description": "Creates a GROMACS structure file (GRO) adding the information of the solvent box to the input structure file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the editconf tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_gro_path",
"required": true,
"description": "Path to the input GRO file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/editconf.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "output_gro_path",
"required": true,
"description": "Path to the output GRO file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_editconf.gro",
"formats": [
".*\\.gro$"
]
}
]
},
{
"id": "genion",
"description": "Creates a new compressed GROMACS topology adding ions until reaching the desired concentration to the input compressed GROMACS topology. ",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the genion tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_tpr_path",
"required": true,
"description": "Path to the input portable run input TPR file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/genion.tpr",
"formats": [
".*\\.tpr$"
]
},
{
"id": "output_gro_path",
"required": true,
"description": "Path to the input structure GRO file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_genion.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "input_top_zip_path",
"required": true,
"description": "Path the input TOP topology in zip format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/genion.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "output_top_zip_path",
"required": true,
"description": "Path the output topology TOP and ITP files zipball",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_genion.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "genrestr",
"description": "Creates a new GROMACS compressed topology applying the indicated force restrains to the given input compressed topology.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the genrestr tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure PDB, GRO or TPR format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/genrestr.gro",
"formats": [
".*\\.pdb$",
".*\\.gro$",
".*\\.tpr$"
]
},
{
"id": "input_ndx_path",
"required": true,
"description": "Path to the input GROMACS index file, NDX format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/genrestr.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_itp_path",
"required": true,
"description": "Path the output ITP topology file with restrains",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_genrestr.itp",
"formats": [
".*\\.itp$"
]
}
]
},
{
"id": "grompp",
"description": "Creates a GROMACS portable binary run input file (TPR) applying the desired properties from the input compressed GROMACS topology.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the grompp tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_md/master/biobb_md/test/data/config/config_grompp.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_gro_path",
"required": true,
"description": "Path to the input GROMACS structure GRO file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/grompp.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "input_top_zip_path",
"required": true,
"description": "Path the input GROMACS topology TOP and ITP files in zip format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/grompp.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "output_tpr_path",
"required": true,
"description": "Path to the output portable binary run file TPR",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_grompp.tpr",
"formats": [
".*\\.tpr$"
]
},
{
"id": "input_cpt_path",
"required": false,
"description": "Path to the input GROMACS checkpoint file CPT",
"filetype": "input",
"sample": null,
"formats": [
".*\\.cpt$"
]
},
{
"id": "input_ndx_path",
"required": false,
"description": "Path to the input GROMACS index files NDX",
"filetype": "input",
"sample": null,
"formats": [
".*\\.ndx$"
]
}
]
},
{
"id": "mdrun",
"description": "Performs molecular dynamics simulations from an input GROMACS TPR file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the mdrun tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_tpr_path",
"required": true,
"description": "Path to the portable binary run input file TPR",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/mdrun.tpr",
"formats": [
".*\\.tpr$"
]
},
{
"id": "output_trr_path",
"required": true,
"description": "Path to the GROMACS uncompressed raw trajectory file TRR",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_mdrun.trr",
"formats": [
".*\\.trr$"
]
},
{
"id": "output_gro_path",
"required": true,
"description": "Path to the output GROMACS structure GRO file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_mdrun.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "output_edr_path",
"required": true,
"description": "Path to the output GROMACS portable energy file EDR",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_mdrun.edr",
"formats": [
".*\\.edr$"
]
},
{
"id": "output_log_path",
"required": true,
"description": "Path to the output GROMACS trajectory log file LOG",
"filetype": "output",
"sample": null,
"formats": [
".*\\.log$"
]
},
{
"id": "output_xtc_path",
"required": false,
"description": "Path to the GROMACS compressed trajectory file XTC",
"filetype": "output",
"sample": null,
"formats": [
".*\\.xtc$"
]
},
{
"id": "output_cpt_path",
"required": false,
"description": "Path to the output GROMACS checkpoint file CPT",
"filetype": "output",
"sample": null,
"formats": [
".*\\.cpt$"
]
},
{
"id": "output_dhdl_path",
"required": false,
"description": "Path to the output dhdl",
"filetype": "output",
"sample": null,
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "make_ndx",
"description": "Creates a GROMACS index file (NDX) from an input selection and an input GROMACS structure file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the make_ndx tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input GRO/PDB/TPR file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/make_ndx.tpr",
"formats": [
".*\\.gro$",
".*\\.pdb$",
".*\\.tpr$"
]
},
{
"id": "output_ndx_path",
"required": true,
"description": "Path to the output index NDX file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_make_ndx.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "input_ndx_path",
"required": false,
"description": "Path to the input index NDX file",
"filetype": "input",
"sample": null,
"formats": [
".*\\.ndx$"
]
}
]
},
{
"id": "select",
"description": "Creates a GROMACS index file (NDX) from an input selection and an input GROMACS structure file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the select tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_md/master/biobb_md/test/data/config/config_select.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input GRO/PDB/TPR file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/make_ndx.tpr",
"formats": [
".*\\.gro$",
".*\\.pdb$",
".*\\.tpr$"
]
},
{
"id": "output_ndx_path",
"required": true,
"description": "Path to the output index NDX file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_select.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "input_ndx_path",
"required": false,
"description": "Path to the input index NDX file",
"filetype": "input",
"sample": null,
"formats": [
".*\\.ndx$"
]
}
]
},
{
"id": "solvate",
"description": "Creates a new compressed GROMACS topology file adding solvent molecules to a given input compressed GROMACS topology file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the solvate tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_solute_gro_path",
"required": true,
"description": "Path to the input GRO file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/solvate.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "output_gro_path",
"required": true,
"description": "Path to the output GRO file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_solvate.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "input_top_zip_path",
"required": true,
"description": "Path the input TOP topology in zip format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs/solvate.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "output_top_zip_path",
"required": true,
"description": "Path the output topology in zip format",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs/ref_solvate.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "ndx2resttop",
"description": "Creates a new GROMACS compressed topology applying the force restrains to the input groups in the input index file to the given input compressed topology.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the ndx2resttop tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_md/master/biobb_md/test/data/config/config_ndx2resttop.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_ndx_path",
"required": true,
"description": "Path to the input NDX index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs_extra/ndx2resttop.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "input_top_zip_path",
"required": true,
"description": "Path the input TOP topology in zip format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs_extra/ndx2resttop.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "output_top_zip_path",
"required": true,
"description": "Path the output TOP topology in zip format",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs_extra/ref_ndx2resttop.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "append_ligand",
"description": "Takes a ligand ITP file and inserts it in a topology.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the append_ligand tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_zip_path",
"required": true,
"description": "Path the input topology TOP and ITP files zipball",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs_extra/ndx2resttop.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "input_itp_path",
"required": true,
"description": "Path to the ligand ITP file to be inserted in the topology",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/data/gromacs_extra/pep_ligand.itp",
"formats": [
".*\\.itp$"
]
},
{
"id": "output_top_zip_path",
"required": true,
"description": "Path/Name the output topology TOP and ITP files zipball",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_md/raw/master/biobb_md/test/reference/gromacs_extra/ref_appendligand.zip",
"formats": [
".*\\.zip$"
]
}
]
}
]
},
{
"id": "biobb_model",
"tools": [
{
"id": "fix_side_chain",
"description": "Reconstructs the missing side chains and heavy atoms of the given PDB file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the fix_side_chain tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_pdb_path",
"required": true,
"description": "Input PDB file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_model/raw/master/biobb_model/test/data/model/2ki5.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Output PDB file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_model/raw/master/biobb_model/test/reference/model/output_pdb_path.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "mutate",
"description": "Creates a new PDB file performing the mutations given in a list of amino acid mutations to the input PDB file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the mutate tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_model/master/biobb_model/test/data/config/config_mutate.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_pdb_path",
"required": true,
"description": "Input PDB file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_model/raw/master/biobb_model/test/data/model/2ki5.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Output PDB file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_model/raw/master/biobb_model/test/reference/model/output_mutated_pdb_path.pdb",
"formats": [
".*\\.pdb$"
]
}
]
}
]
},
{
"id": "biobb_pmx",
"tools": [
{
"id": "mutate",
"description": "pmx tool to insert mutated residues in structure files for free energy simulations",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the mutate tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_pmx/master/biobb_pmx/test/data/config/config_mutate.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/data/pmx/frame99.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Path to the output structure file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/reference/pmx/ref_output_structure.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
},
{
"id": "input_b_structure_path",
"required": false,
"description": "Path to the mutated input structure file",
"filetype": "input",
"sample": null,
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
}
]
},
{
"id": "gentop",
"description": "pmx tool to generate hybrid GROMACS topologies: adding a B state to an .itp or .top file for a hybrid residue",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gentop tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_pmx/master/biobb_pmx/test/data/config/config_gentop.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_zip_path",
"required": true,
"description": "Path the input GROMACS topology TOP and ITP files in zip format",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/data/pmx/topology.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "output_top_zip_path",
"required": true,
"description": "Path the output TOP topology in zip format",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/reference/pmx/ref_output_topology.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "analyse",
"description": "pmx tool to calculate free energies from fast growth thermodynamic integration simulations.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the analyse tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_pmx/master/biobb_pmx/test/data/config/config_analyse.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_a_xvg_zip_path",
"required": true,
"description": "Path the zip file containing the dgdl",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/data/pmx/xvg_A.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "input_b_xvg_zip_path",
"required": true,
"description": "Path the zip file containing the dgdl",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/data/pmx/xvg_B.zip",
"formats": [
".*\\.zip$"
]
},
{
"id": "output_result_path",
"required": true,
"description": "Path to the TXT results file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_pmx/raw/master/biobb_pmx/test/reference/pmx/ref_result.txt",
"formats": [
".*\\.txt$"
]
},
{
"id": "output_work_plot_path",
"required": true,
"description": "Path to the PNG plot results file",
"filetype": "output",
"sample": null,
"formats": [
".*\\.png$"
]
}
]
}
]
},
{
"id": "biobb_structure_utils",
"tools": [
{
"id": "cat_pdb",
"description": "Class to concat two PDB structures in a single PDB file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cat_pdb tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure1",
"required": true,
"description": "Input structure 1 file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/cat_protein.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "input_structure2",
"required": true,
"description": "Input structure 2 file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/cat_ligand.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Output protein file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/ref_cat_pdb.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "extract_atoms",
"description": "Class to extract atoms from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the extract_atoms tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_structure_utils/master/biobb_structure_utils/test/data/config/config_extract_atoms.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/2vgb.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Output structure file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/OE2_atoms.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
}
]
},
{
"id": "extract_chain",
"description": "Class to extract a chain from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the extract_chain tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_structure_utils/master/biobb_structure_utils/test/data/config/config_extract_chain.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/extract_chain.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Output structure file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/ref_extract_chain.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "extract_heteroatoms",
"description": "Class to extract hetero-atoms from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the extract_heteroatoms tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_structure_utils/master/biobb_structure_utils/test/data/config/config_extract_heteroatoms.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/extract_heteroatom.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_heteroatom_path",
"required": true,
"description": "Output heteroatom file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/ref_extract_heteroatom.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "extract_model",
"description": "Class to extract a model from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the extract_model tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_structure_utils/master/biobb_structure_utils/test/data/config/config_extract_model.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/extract_model.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Output structure file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/ref_extract_model.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "extract_protein",
"description": "Class to extract a protein from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the extract_protein tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/extract_protein.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_protein_path",
"required": true,
"description": "Output protein file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/ref_extract_protein.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "remove_ligand",
"description": "Class to remove the selected ligand atoms from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the remove_ligand tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_structure_utils/master/biobb_structure_utils/test/data/config/config_remove_ligand.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/WT_aq4_md_1.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Output structure file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/WT_apo_md_1.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
}
]
},
{
"id": "remove_pdb_water",
"description": "Class to remove water molecules from PDB 3D structures.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the remove_pdb_water tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_pdb_path",
"required": true,
"description": "Input PDB file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/WT_aq4_md_WAT.pdb",
"formats": [
".*\\.pdb$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Output PDB file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/WT_apo_no_wat.pdb",
"formats": [
".*\\.pdb$"
]
}
]
},
{
"id": "renumber_structure",
"description": "Class to renumber atomic indexes from a 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the renumber_structure tool.",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Input structure file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/cl3.noH.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
},
{
"id": "output_structure_path",
"required": true,
"description": "Output structure file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/renum_cl3_noH.pdb",
"formats": [
".*\\.pdb$",
".*\\.gro$"
]
},
{
"id": "output_mapping_json_path",
"required": true,
"description": "Output mapping json file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/cl3_output_mapping_json_path.json",
"formats": [
".*\\.json$"
]
}
]
},
{
"id": "sort_gro_residues",
"description": "Class to sort the selected residues from a GRO 3D structure.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the sort_gro_residues tool.",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_structure_utils/master/biobb_structure_utils/test/data/config/config_sort_gro_residues.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_gro_path",
"required": true,
"description": "Input GRO file path",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/data/utils/WT_aq4_md_1.gro",
"formats": [
".*\\.gro$"
]
},
{
"id": "output_gro_path",
"required": true,
"description": "Output sorted GRO file path",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_structure_utils/raw/master/biobb_structure_utils/test/reference/utils/WT_aq4_md_sorted.gro",
"formats": [
".*\\.gro$"
]
}
]
}
]
}
]
}
</code>
<a id="list_tools_ex"></a>
#### List of tools from a specific package
For more information about this endpoint, please visit the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/List%20of%20Services/getToolsList).
##### Endpoint_____no_output_____**GET** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}`_____no_output_____##### Code_____no_output_____
<code>
package = 'biobb_analysis'
url = apiURL + 'launch/' + package
response = get_data(url)
print(json.dumps(response.json, indent=2)){
"id": "biobb_analysis",
"tools": [
{
"id": "gmx_cluster",
"description": "Creates cluster structures from a given GROMACS compatible trajectory",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_cluster tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_cluster.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_pdb_path",
"required": true,
"description": "Path to the output cluster file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_cluster.pdb",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "gmx_rms",
"description": "Performs a Root Mean Square deviation (RMSd) analysis from a given GROMACS compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_rms tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_rms.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_xvg_path",
"required": true,
"description": "Path to the XVG output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_rms.xvg",
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "gmx_rgyr",
"description": "Computes the radius of gyration (Rgyr) of a molecule about the x-, y- and z-axes, as a function of time, from a given GROMACS compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_rgyr tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_rgyr.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_xvg_path",
"required": true,
"description": "Path to the XVG output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_rgyr.xvg",
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "gmx_energy",
"description": "Extracts energy components from a given GROMACS energy file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_energy tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_energy.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_energy_path",
"required": true,
"description": "Path to the input EDR file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/energy.edr",
"formats": [
".*\\.edr$"
]
},
{
"id": "output_xvg_path",
"required": true,
"description": "Path to the XVG output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_energy.xvg",
"formats": [
".*\\.xvg$"
]
}
]
},
{
"id": "gmx_image",
"description": "Corrects periodicity (image) from a given GROMACS compatible trajectory file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_image tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_image.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the GROMACS input topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_traj_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_image.xtc",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "gmx_trjconv_str",
"description": "Converts between GROMACS compatible structure file formats and/or extracts a selection of atoms.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_trjconv_str tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_trjconv_str.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_structure_path",
"required": true,
"description": "Path to the input structure file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the GROMACS input topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_str_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_trjconv.str.pdb",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "gmx_trjconv_str_ens",
"description": "Extracts an ensemble of frames containing a selection of atoms from GROMACS compatible trajectory files.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_trjconv_str_ens tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_trjconv_str_ens.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the GROMACS input topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/topology.tpr",
"formats": [
".*\\.tpr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.brk$",
".*\\.ent$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_str_ens_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_trjconv.str.ens.zip",
"formats": [
".*\\.zip$"
]
}
]
},
{
"id": "gmx_trjconv_trj",
"description": "Converts between GROMACS compatible trajectory file formats and/or extracts a selection of atoms.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the gmx_trjconv_trj tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_gmx_trjconv_trj.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the GROMACS trajectory file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/trajectory.trr",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.cpt$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
},
{
"id": "input_index_path",
"required": false,
"description": "Path to the GROMACS index file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/gromacs/index.ndx",
"formats": [
".*\\.ndx$"
]
},
{
"id": "output_traj_path",
"required": true,
"description": "Path to the output file",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/gromacs/ref_trjconv.trj.xtc",
"formats": [
".*\\.xtc$",
".*\\.trr$",
".*\\.gro$",
".*\\.g96$",
".*\\.pdb$",
".*\\.tng$"
]
}
]
},
{
"id": "cpptraj_average",
"description": "Calculates a structure average of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_average tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_average.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed structure",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.average.pdb",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_bfactor",
"description": "Calculates the Bfactor fluctuations of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_bfactor tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "input_exp_path",
"required": false,
"description": "Path to the experimental reference file (required if reference = experimental)",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/experimental.1e5t.pdb",
"formats": null
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.bfactor.first.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_rms",
"description": "Calculates the Root Mean Square deviation (RMSd) of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_rms tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "input_exp_path",
"required": false,
"description": "Path to the experimental reference file (required if reference = experimental)",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/experimental.1e5t.pdb",
"formats": null
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.rms.first.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_rmsf",
"description": "Calculates the Root Mean Square fluctuations (RMSf) of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_rmsf tool",
"filetype": "input",
"sample": null,
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "input_exp_path",
"required": false,
"description": "Path to the experimental reference file (required if reference = experimental)",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/experimental.1e5t.pdb",
"formats": null
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.rmsf.first.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_rgyr",
"description": "Computes the radius of gyration (Rgyr) from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_rgyr tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_rgyr.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output analysis",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.rgyr.dat",
"formats": [
".*\\.dat$",
".*\\.agr$",
".*\\.xmgr$",
".*\\.gnu$"
]
}
]
},
{
"id": "cpptraj_dry",
"description": "Dehydrates a given cpptraj compatible trajectory stripping out solvent molecules and ions.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_dry tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_dry.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.dry.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_strip",
"description": "Strips a defined set of atoms (mask) from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_strip tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_strip.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.strip.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_snapshot",
"description": "Extracts a particular snapshot from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_snapshot tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_snapshot.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed structure",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.snapshot.pdb",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_slice",
"description": "Extracts a particular trajectory slice from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_slice tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_slice.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.slice.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_convert",
"description": "Converts between cpptraj compatible trajectory file formats and/or extracts a selection of atoms or frames.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_convert tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_convert.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.convert.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_mask",
"description": "Extracts a selection of atoms from a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_mask tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_mask.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.mask.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
},
{
"id": "cpptraj_image",
"description": "Corrects periodicity (image) from a given cpptraj trajectory file.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_image tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_image.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed trajectory",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.image.netcdf",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
}
]
}
</code>
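The listing above can also be consumed programmatically. As a minimal sketch (assuming the `response` object from the previous cell, whose parsed body is accessed as `response.json` like everywhere else in this notebook), the tool identifiers of the package can be collected with a simple comprehension:

```python
# Minimal sketch: collect the tool identifiers returned for the 'biobb_analysis' package
tool_ids = [tool['id'] for tool in response.json['tools']]
print(tool_ids)  # ['gmx_cluster', 'gmx_rms', 'gmx_rgyr', 'gmx_energy', 'gmx_image', ...]
```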
<a id="tools_prop_ex"></a>
#### Tool's properties
For more information about this endpoint, please visit the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Launch%20Tool/getLaunchTool).
##### Endpoint_____no_output_____**GET** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}/{tool}`_____no_output_____##### Code_____no_output_____
<code>
package = 'biobb_analysis'
tool = 'cpptraj_average'
url = apiURL + 'launch/' + package + '/' + tool
response = get_data(url)
print(json.dumps(response.json, indent=2)){
"id": "cpptraj_average",
"description": "Calculates a structure average of a given cpptraj compatible trajectory.",
"arguments": [
{
"id": "config",
"required": false,
"description": "Configuration file for the cpptraj_average tool",
"filetype": "input",
"sample": "https://raw.githubusercontent.com/bioexcel/biobb_analysis/master/biobb_analysis/test/data/config/config_cpptraj_average.json",
"formats": [
".*\\.json$",
".*\\.yml$"
]
},
{
"id": "input_top_path",
"required": true,
"description": "Path to the input structure or topology file",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top",
"formats": [
".*\\.top$",
".*\\.pdb$",
".*\\.prmtop$",
".*\\.parmtop$",
".*\\.zip$"
]
},
{
"id": "input_traj_path",
"required": true,
"description": "Path to the input trajectory to be processed",
"filetype": "input",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd",
"formats": [
".*\\.crd$",
".*\\.cdf$",
".*\\.netcdf$",
".*\\.restart$",
".*\\.ncrestart$",
".*\\.restartnc$",
".*\\.dcd$",
".*\\.charmm$",
".*\\.cor$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.trr$",
".*\\.gro$",
".*\\.binpos$",
".*\\.xtc$",
".*\\.cif$",
".*\\.arc$",
".*\\.sqm$",
".*\\.sdf$",
".*\\.conflib$"
]
},
{
"id": "output_cpptraj_path",
"required": true,
"description": "Path to the output processed structure",
"filetype": "output",
"sample": "https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/reference/ambertools/ref_cpptraj.average.pdb",
"formats": [
".*\\.crd$",
".*\\.netcdf$",
".*\\.rst7$",
".*\\.ncrst$",
".*\\.dcd$",
".*\\.pdb$",
".*\\.mol2$",
".*\\.binpos$",
".*\\.trr$",
".*\\.xtc$",
".*\\.sqm$"
]
}
]
}
</code>
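Similarly, the `arguments` list of a single tool can be inspected in code. A small sketch, reusing the `response` object from the cell above, to separate required from optional arguments:

```python
# Minimal sketch: split the tool's arguments into required and optional ones
required = [arg['id'] for arg in response.json['arguments'] if arg['required']]
optional = [arg['id'] for arg in response.json['arguments'] if not arg['required']]
print('required:', required)  # ['input_top_path', 'input_traj_path', 'output_cpptraj_path']
print('optional:', optional)  # ['config']
```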
<a id="launch_tool_ex"></a>
### Launch tool
For more information about this endpoint, please visit the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Launch%20Tool/postLaunchTool). The documentation for all the tools is available in the [BioBB REST API Tools Documentation section](https://mmb.irbbarcelona.org/biobb-api/tools-documentation?docExpansion=none). Interactive examples for all the tools are available in the [BioBB REST API Tools Execution section](https://mmb.irbbarcelona.org/biobb-api/tools-execution).
Definition of the functions needed to launch a job:_____no_output_____
<code>
from io import BytesIO
from pathlib import Path
# Function used for encode python dictionary to JSON file
def encode_config(data):
jsonData = json.dumps(data)
binaryData = jsonData.encode()
return BytesIO(binaryData)
# Launch job
def launch_job(url, **kwargs):
data = {}
files = {}
# Fill data (output paths) and files (input files) objects
for key, value in kwargs.items():
# Inputs / Outputs
if type(value) is str:
if key.startswith('input'):
files[key] = (value, open(value, 'rb'))
elif key.startswith('output'):
data[key] = value
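            # Any other string pointing to an existing file (e.g. the config path) is attached as an upload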
elif Path(value).is_file():
files[key] = (value, open(value, 'rb'))
# Properties (in case properties are provided as a dictionary instead of a file)
if type(value) is dict:
files['config'] = ('prop.json', encode_config(value))
# Request URL with data and files
response = post_data(url, data, files)
# Print REST API response
print(json.dumps(response.json, indent=2))
# Save token if status == 303
if response.status == 303:
token = response.json['token']
return token_____no_output_____
</code>
Hereafter we will launch a job for the *biobb_analysis.cpptraj_average* tool using the input files provided in the *files/* folder of this same repository. The response is a JSON with the status code, the state of the job, a message and a token for checking the job status.
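If the *files/* folder is not available locally, the sample inputs can be fetched first from the `sample` URLs shown in the *cpptraj_average* listing above. This is only a convenience sketch (the local paths are the ones used in the launch cells below):

```python
import os
import requests

# Sample inputs listed for biobb_analysis.cpptraj_average (taken from the "sample" fields above)
samples = {
    'files/cpptraj.parm.top': 'https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.parm.top',
    'files/cpptraj.traj.dcd': 'https://github.com/bioexcel/biobb_analysis/raw/master/biobb_analysis/test/data/ambertools/cpptraj.traj.dcd'
}
os.makedirs('files', exist_ok=True)
for path, url in samples.items():
    with open(path, 'wb') as f:
        f.write(requests.get(url, allow_redirects=True).content)
```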
<a id="tool_yml_ex"></a>
#### Launch job with a YAML file config
##### File config_____no_output_____```yaml
properties:
in_parameters:
start: 1
end: -1
step: 1
mask: c-alpha
out_parameters:
format: pdb
```_____no_output_____##### Endpoint_____no_output_____**POST** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}/{tool}`_____no_output_____##### Code_____no_output_____The function below sends POST data and files to the *{package}/{tool}* endpoint. The config properties are sent as a YAML file.
The response is a JSON with the status code, the state of the job, a message and a token that will be used for checking the job status in the next step. _____no_output_____
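As an optional aside before launching: the repository already provides *files/config.yml*, but the same configuration could also be written out from Python. Plain text is enough here, so no extra YAML dependency is assumed:

```python
# Optional sketch: write the YAML configuration shown above to files/config.yml
yaml_config = """properties:
    in_parameters:
      start: 1
      end: -1
      step: 1
      mask: c-alpha
    out_parameters:
      format: pdb
"""
with open('files/config.yml', 'w') as f:
    f.write(yaml_config)
```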
<code>
# Launch BioBB on REST API with YAML config file
token = launch_job(url = apiURL + 'launch/biobb_analysis/cpptraj_average',
config = 'files/config.yml',
input_top_path = 'files/cpptraj.parm.top',
input_traj_path = 'files/cpptraj.traj.dcd',
output_cpptraj_path = 'output.cpptraj.average.pdb'){
"code": 303,
"state": "RUNNING",
"message": "The requested job has has been successfully launched, please go to /retrieve/status/{token} for checking job status.",
"token": "fe2805760eeeec0d5b8a34fbc40aa6c2a2d68c7ba1663cccb88659b1e149c898a414bbc04e37bb73efc725b7a29de2a93ffb55e6ef85cd6467f3d62a06ea5bfa"
}
</code>
<a id="tool_json_ex"></a>
#### Launch job with a JSON file config
##### File config_____no_output_____```json
{
"in_parameters": {
"start": 1,
"end": -1,
"step": 1,
"mask": "c-alpha"
},
"out_parameters": {
"format": "pdb"
}
}
```_____no_output_____##### Endpoint_____no_output_____**POST** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}/{tool}`_____no_output_____##### Code_____no_output_____The function below sends POST data and files to the *{package}/{tool}* endpoint. The config properties are sent as a JSON file.
The response is a JSON with the status code, the state of the job, a message and a token that will be used for checking the job status in the next step. _____no_output_____
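As an optional aside, *files/config.json* also ships with the repository; it could be regenerated with the standard `json` module used throughout this notebook:

```python
# Optional sketch: write the JSON configuration shown above to files/config.json
import json

config = {
    "in_parameters": {"start": 1, "end": -1, "step": 1, "mask": "c-alpha"},
    "out_parameters": {"format": "pdb"}
}
with open('files/config.json', 'w') as f:
    json.dump(config, f, indent=2)
```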
<code>
# Launch BioBB on REST API with JSON config file
token = launch_job(url = apiURL + 'launch/biobb_analysis/cpptraj_average',
config = 'files/config.json',
input_top_path = 'files/cpptraj.parm.top',
input_traj_path = 'files/cpptraj.traj.dcd',
output_cpptraj_path = 'output.cpptraj.average.pdb'){
"code": 303,
"state": "RUNNING",
"message": "The requested job has has been successfully launched, please go to /retrieve/status/{token} for checking job status.",
"token": "84ab5ef63d82ab3fa4f120532949905d83f6aff65f101cb1ed5fdd5f05acb00421ddc4560098f877f26a96972a8ea8521ab222a0bb78a5ffa9d213c0ab2618c9"
}
</code>
<a id="tool_dict_ex"></a>
#### Launch job with a python dictionary config_____no_output_____##### Endpoint_____no_output_____**POST** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/launch/{package}/{tool}`_____no_output_____##### Code_____no_output_____The function below sends POST data and files to the *{package}/{tool}* endpoint. The config properties are sent as a python dictionary embedded in the code.
The response is a JSON with the status code, the state of the job, a message and a token that will be used for checking the job status in the next step. _____no_output_____
<code>
# Launch BioBB on REST API with a python dictionary config
prop = {
"in_parameters" : {
"start": 1,
"end": -1,
"step": 1,
"mask": "c-alpha"
},
"out_parameters" : {
"format": "pdb"
}
}
token = launch_job(url = apiURL + 'launch/biobb_analysis/cpptraj_average',
config = prop,
input_top_path = 'files/cpptraj.parm.top',
input_traj_path = 'files/cpptraj.traj.dcd',
output_cpptraj_path = 'output.cpptraj.average.pdb'){
"code": 303,
"state": "RUNNING",
"message": "The requested job has has been successfully launched, please go to /retrieve/status/{token} for checking job status.",
"token": "98013d74bef397d5498db3eb1008e5e136702d63903b6ea0cb5a2db44c4a4e0adbcd1ce9999915acd90c444f8749880c052185bbbfc747c1ebc7d67d6d2c84c8"
}
</code>
<a id="retrieve_status_ex"></a>
### Retrieve status
For more information about this endpoint, please visit the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Retrieve/getRetrieveStatus).
Definition of the functions needed to retrieve the status of a job:_____no_output_____
<code>
import datetime
from time import sleep
# Checks status until a provided "ok" status is returned by the response
def check_status(url, ok, error):
counter = 0
while True:
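        # Back off gradually: poll every second for the first ~10 s, every 10 s until a minute has passed, then every 60 s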
if counter < 10: slp = 1
if counter >= 10 and counter < 60: slp = 10
if counter >= 60: slp = 60
counter = counter + slp
sleep(slp)
r = requests.get(url)
if r.status_code == ok or r.status_code == error:
            return counter
# Function that checks the status and parses the response JSON, saving the output files in a list
def check_job(token, apiURL):
# define retrieve status URL
url = apiURL + 'retrieve/status/' + token
# check status until job has finished
counter = check_status(url, 200, 500)
# Get content when status = 200
response = get_data(url)
# Save id for the generated output_files
if response.status == 200:
out_files = []
for outf in response.json['output_files']:
item = { 'id': outf['id'], 'name': outf['name'] }
out_files.append(item)
# Print REST API response
print("Total elapsed time: %s" % str(datetime.timedelta(seconds=counter)))
print("REST API JSON response:")
print(json.dumps(response.json, indent=4))
if response.status == 200:
return out_files
else: return None_____no_output_____
</code>
##### Endpoint_____no_output_____**GET** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/retrieve/status/{token}`_____no_output_____##### Code_____no_output_____The function below checks the status of a job and waits until the response status is `200`. The response is a JSON with the status code, the state of the job, a message, a list with all the generated output files and the expiration date of these files. Additionally, the function also reports the elapsed time from when the job was launched until it finished._____no_output_____
<code>
# Check job status
out_files = check_job(token, apiURL)Total elapsed time: 0:00:20
REST API JSON response:
{
"code": 200,
"state": "FINISHED",
"message": "The requested job has finished successfully, please go to /retrieve/data/{id} for each output_files.",
"output_files": [
{
"id": "5e42837a40fe75.05757111",
"name": "output.cpptraj.average.pdb",
"size": 77397,
"mimetype": "text/plain"
}
],
"expiration": "February 13, 2020 00:00 GMT+0000"
}
</code>
<a id="retrieve_data_ex"></a>
### Retrieve data
For more information about this endpoint, please visit the [BioBB REST API Documentation section](https://mmb.irbbarcelona.org/biobb-api/rest#/Retrieve/getRetrieveData).
Definition of the functions needed to retrieve the output file(s) generated by a job:_____no_output_____
<code>
# Downloads to disk a file from a given URL
def get_file(url, filename):
r = requests.get(url, allow_redirects=True)
file = open(filename,'wb')
file.write(r.content)
file.close()
# Retrieves all the files provided in the out_files list
def retrieve_data(out_files, apiURL):
if not out_files:
return "No files provided"
for outf in out_files:
get_file(apiURL + 'retrieve/data/' + outf['id'], outf['name'])_____no_output_____
</code>
##### Endpoint_____no_output_____**GET** `https://mmb.irbbarcelona.org/biobb-api/rest/v1/retrieve/data/{id}`_____no_output_____##### Code_____no_output_____The function below makes a single call to the *retrieve/data* endpoint for each output file obtained from the *retrieve/status* endpoint and saves the generated file(s) to disk._____no_output_____
<code>
# Save generated file(s) to disk
retrieve_data(out_files, apiURL)_____no_output_____
</code>
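With the three helpers defined above (`launch_job`, `check_job` and `retrieve_data`), the whole launch, poll and download cycle fits in a few lines. The sketch below just recaps the flow that the practical cases in the next section follow, reusing the file names from the earlier launch examples:

```python
# Recap sketch of the full cycle using the helper functions defined above
token = launch_job(url = apiURL + 'launch/biobb_analysis/cpptraj_average',
                   config = 'files/config.yml',
                   input_top_path = 'files/cpptraj.parm.top',
                   input_traj_path = 'files/cpptraj.traj.dcd',
                   output_cpptraj_path = 'output.cpptraj.average.pdb')
out_files = check_job(token, apiURL)  # polls retrieve/status/{token} until the job finishes
retrieve_data(out_files, apiURL)      # downloads each retrieve/data/{id} file to disk
```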
<a id="practical_cases"></a>
## Practical cases_____no_output_____Now we will execute some BioExcel Building Blocks through the BioBB REST API and use the results to interact with other Python libraries such as [plotly](https://plot.ly/python/offline/) or [nglview](http://nglviewer.org/#nglview)._____no_output_____<a id="example1"></a>
### Example 1: download PDB file from RCSB database_____no_output_____Launch the *biobb_io.pdb* job that downloads a PDB file from the RCSB PDB database:_____no_output_____
<code>
# Downloading desired PDB file
# Create properties dict and inputs/outputs
downloaded_pdb = '3EBP.pdb'
prop = {
'pdb_code': '3EBP',
'filter': False
}
# Launch bb on REST API
token = launch_job(url = apiURL + 'launch/biobb_io/pdb',
config = prop,
output_pdb_path = downloaded_pdb)
{
"code": 303,
"state": "RUNNING",
"message": "The requested job has has been successfully launched, please go to /retrieve/status/{token} for checking job status.",
"token": "af60d733db949a71167f3aa6a7a793fc520b5a4176b57a770bed4798654a79be2a47b81e6a77de4eb285de84f9b768b119004f0bfbbe4be9e5ff1ffe31b81fd9"
}
# Check job status
out_files = check_job(token, apiURL)Total elapsed time: 0:00:06
REST API JSON response:
{
"code": 200,
"state": "FINISHED",
"message": "The requested job has finished successfully, please go to /retrieve/data/{id} for each output_files.",
"output_files": [
{
"id": "5e428389eeafa3.49051362",
"name": "3EBP.pdb",
"size": 609120,
"mimetype": "text/plain"
}
],
"expiration": "February 13, 2020 00:00 GMT+0000"
}
# Save generated file to disk
retrieve_data(out_files, apiURL)_____no_output_____
</code>
Visualize downloaded PDB in NGLView:_____no_output_____
<code>
import nglview
# Show protein
view = nglview.show_structure_file(downloaded_pdb)
view.add_representation(repr_type='ball+stick', selection='het')
view._remote_call('setSize', target='Widget', args=['','600px'])
view_____no_output_____view.render_image()
view.download_image(filename='ngl1.png')_____no_output_____
</code>
<img src='ngl1.png'></img>_____no_output_____<a id="example2"></a>
### Example 2: extract heteroatom from a given structure_____no_output_____Launch the *biobb_structure_utils.extract_heteroatoms* job that extracts a heteroatom from a PDB file._____no_output_____
<code>
# Extracting heteroatom from a given structure
# Create properties dict and inputs/outputs
heteroatom = 'CPB.pdb'
prop = {
'heteroatoms': [{
'name': 'CPB'
}]
}
# Launch bb on REST API
token = launch_job(url = apiURL + 'launch/biobb_structure_utils/extract_heteroatoms',
config = prop,
input_structure_path = downloaded_pdb,
output_heteroatom_path = heteroatom)
{
"code": 303,
"state": "RUNNING",
"message": "The requested job has has been successfully launched, please go to /retrieve/status/{token} for checking job status.",
"token": "740c9ac30767ba996e445e4ed05c151ee903fed2234c0cc7ace6ec3ba4e1fa8bdcd5a3c6835c7f1038530eb81c4cc319674f235cf55b38863903181dce09a8d1"
}
# Check job status
out_files = check_job(token, apiURL)Total elapsed time: 0:00:20
REST API JSON response:
{
"code": 200,
"state": "FINISHED",
"message": "The requested job has finished successfully, please go to /retrieve/data/{id} for each output_files.",
"output_files": [
{
"id": "5e4283986555a0.86371712",
"name": "CPB.pdb",
"size": 2268,
"mimetype": "text/plain"
}
],
"expiration": "February 13, 2020 00:00 GMT+0000"
}
# Save generated file to disk
retrieve_data(out_files, apiURL)_____no_output_____
</code>
Visualize generated extracted heteroatom in NGLView:_____no_output_____
<code>
# Show protein
view = nglview.show_structure_file(heteroatom)
view.add_representation(repr_type='ball+stick', selection='het')
view._remote_call('setSize', target='Widget', args=['','600px'])
view_____no_output_____view.render_image()
view.download_image(filename='ngl2.png')_____no_output_____
</code>
<img src='ngl2.png'></img>_____no_output_____<a id="example3"></a>
### Example 3: extract energy components from a given GROMACS energy file_____no_output_____
<code>
# GMXEnergy: Getting system energy by time
# Create prop dict and inputs/outputs
output_min_ene_xvg ='file_min_ene.xvg'
output_min_edr = 'files/1AKI_min.edr'
prop = {
'terms': ["Potential"]
}
# Launch bb on REST API
token = launch_job(url = apiURL + 'launch/biobb_analysis/gmx_energy',
config = prop,
input_energy_path = output_min_edr,
output_xvg_path = output_min_ene_xvg){
"code": 303,
"state": "RUNNING",
"message": "The requested job has has been successfully launched, please go to /retrieve/status/{token} for checking job status.",
"token": "170e8e2645d179eaa40e2de652f3e6dec909ef1df4642526ba789ed21806bab917bb4ce7f9fb730dfe635155f52ac9f3869c4429afcc767bd190f01337a8a718"
}
# Check job status
out_files = check_job(token, apiURL)Total elapsed time: 0:00:08
REST API JSON response:
{
"code": 200,
"state": "FINISHED",
"message": "The requested job has finished successfully, please go to /retrieve/data/{id} for each output_files.",
"output_files": [
{
"id": "5e4283a6c70143.38956052",
"name": "file_min_ene.xvg",
"size": 54143,
"mimetype": "text/plain"
}
],
"expiration": "February 13, 2020 00:00 GMT+0000"
}
# Save generated file to disk
retrieve_data(out_files, apiURL)_____no_output_____
</code>
Visualize generated energy file in plotly:_____no_output_____
<code>
import plotly
import plotly.graph_objs as go
#Read data from file and filter energy values higher than 1000 Kj/mol^-1
with open(output_min_ene_xvg,'r') as energy_file:
x,y = map(
list,
zip(*[
(float(line.split()[0]),float(line.split()[1]))
for line in energy_file
if not line.startswith(("#","@"))
if float(line.split()[1]) < 1000
])
)
plotly.offline.init_notebook_mode(connected=True)
fig = {
"data": [go.Scatter(x=x, y=y)],
"layout": go.Layout(title="Energy Minimization",
xaxis=dict(title = "Energy Minimization Step"),
yaxis=dict(title = "Potential Energy KJ/mol-1")
)
}
plotly.offline.iplot(fig)_____no_output_____
</code>
| {
"repository": "bioexcel/biobb_REST_API_documentation",
"path": "biobb_REST_API_documentation/html/biobb_REST_API_documentation.web.ipynb",
"matched_keywords": [
"molecular dynamics"
],
"stars": null,
"size": 436356,
"hexsha": "d0baab50c9ff431206c661172376b0046692cdc6",
"max_line_length": 38750,
"avg_line_length": 40.1357615894,
"alphanum_fraction": 0.3633478169
} |
# Notebook from sandiegodata/covid19
Path: Notebooks/WhyNotTesting.ipynb
## Imperfect Tests and The Effects of False Positives_____no_output_____The US government has been widely criticized for its failure to test as many of its citizens for COVID-19 infections as other countries. But is mass testing really as easy as it seems? This analysis of the false positive and false negative rates of tests, using published sensitivities and specificities for COVID-19 rt-PCR and antigen tests, shows that even tests with slightly less than perfect results can produce very large numbers of false positives. _____no_output_____
<code>
import sys
# Install required packages
#!{sys.executable} -mpip -q install matplotlib seaborn statsmodels pandas publicdata metapack
%matplotlib inline
import pandas as pd
import geopandas as gpd
import numpy as np
import metapack as mp
import rowgenerators as rg
import publicdata as pub
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(color_codes=True)_____no_output_____
</code>
As the world became more aware of the threat posed by COVID-19 in February 2020, US media began to draw attention to the disparity between the extent of testing being done in other countries versus the United States. The CDC released [fairly restrictive guidelines](https://www.cdc.gov/coronavirus/2019-ncov/hcp/clinical-criteria.html) for what conditions qualified a patient for a lab test for COVID-19 infections, and many media outlets criticized the US CDC for being unprepared to test for the virus.
Criticism intensified when the first version of tests created by the CDC [proved to be unreliable](https://www.forbes.com/sites/rachelsandler/2020/03/02/how-the-cdc-botched-its-initial-coronavirus-response-with-faulty-tests/#5bbf1d50670e). But there are important considerations that these reports have largely ignored, the most important of which is the false positive and false negative rates of the tests, which can produce results that are worse than useless when the prevalence of the condition — the percentage of people who are infected — is very low._____no_output_____Every test — for nearly any sort of test — has an error rate: false positives and false negatives. False negatives are fairly easy to understand. If 1,000 women who have breast cancer take a test that has a false negative rate of 0.1%, the test will report that 999 of them have cancer and that 1 does not, even though she actually does.
The false positive rate is trickier, because it is multiplied not by the number of women who have cancer, but by the number of women who take the test. If a large number of women are tested but few have cancer, the test can report many more false positives than there are women who actually have cancer.
There is evidence that the tests for the COVID-19 virus have a false positive rate large enough that if a large number of people are tested when the prevalence of COVID-19 infections is small, most of the reported positives are false positives.
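To see how lopsided this can get, consider some purely illustrative numbers (assumptions made up for the arithmetic, not measured values for any real test): screen 100,000 people with a test that has a 1% false positive rate in a population where only 0.1% are actually infected. The roughly 99,900 uninfected people then generate about 999 false positives, while the 100 infected people can contribute at most 100 true positives, so roughly nine out of ten positive reports would be wrong even if the test never missed a true infection.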
_____no_output_____# Primer on False Positives and Negatives
Research related to epidemiological tests typically does not report the false positive rate directly; instead it reports two parameters, the Sensitivity and Specificity. [Wikipedia has an excellent article](https://en.wikipedia.org/wiki/Sensitivity_and_specificity) describing these parameters and how they relate to false positive and false negative rates, and [Health News Review](https://www.healthnewsreview.org/) publishes this [very accessible overview of the most important concepts](https://www.healthnewsreview.org/toolkit/tips-for-understanding-studies/understanding-medical-tests-sensitivity-specificity-and-positive-predictive-value/). The most important part of the Wikipedia article to understand is the table in the [worked example](https://en.wikipedia.org/wiki/Sensitivity_and_specificity#Worked_example). When a test is administered, there are four possible outcomes. The test can return a positive result, which can be a true positive or a false positive, or it can return a negative result, which is a true negative or a false negative. If you organize those possibilities by the true condition (does the patient have the virus or not):
* Patient has virus
* True Positive ($\mathit{TP}$)
* False negative ($\mathit{FN}$)
* Patient does not have virus
* True Negative ($\mathit{TN}$)
* False Positive. ($\mathit{FP}$)
In the Wikipedia worked example table:
* The number of people who do have the virus is $\mathit{TP}+\mathit{FN}$, the true positives plus the false negatives, which are the cases that should have been reported positive, but were not.
* The number of people who do not have the virus is $\mathit{TN}+\mathit{FP}$, the true negatives and the false positives, which are the cases that should have been reported negative, but were not.
The values of Sensitivity and Specificity are defined as:
$$\begin{array}{ll}
Sn = \frac{\mathit{TP}}{\mathit{TP} + \mathit{FN}} & \text{True positives outcomes divided by all positive conditions} \tag{1}\label{eq1}\\
Sp = \frac{\mathit{TN}}{\mathit{FP} + \mathit{TN}} & \text{True negatives outcomes divided by all negative conditions}\\
\end{array}$$
We want to know the number of false positives($\mathit{FP}$) given the number of positive conditions ($\mathit{TP}+\mathit{FN}$) and the total number of tests. To compute these, we need to have some more information about the number of people tested, and how common the disease is:
* Total test population $P$, the number of people being tested, which equals $\mathit{TP}+\mathit{FP}+\mathit{FN}+\mathit{TN}$
* The prevalence $p$, the population rate of positive condition.
We can do a little math to get:
$$\begin{array}{ll}
\mathit{TP} = Pp\mathit{Sn} & \text{}\\
\mathit{FP} = P(1-p)(1-\mathit{Sp}) & \text{}\\
\mathit{TN} = P(1-p)\mathit{Sp} & \text{}\\
\mathit{FN} = Pp(1-\mathit{Sn})& \text{}\\
\end{array}$$
You can see examples of these equations worked out in the third line in the red and green cells of the [Worked Example](https://en.wikipedia.org/wiki/Sensitivity_and_specificity#Worked_example) on the Sensitivity and Specificity Wikipedia page.
It is important to note that when these four values are used to calculate $\mathit{Sp}$ and $\mathit{Sn}$, the population value $P$ cancels out, so $\mathit{Sp}$ and $\mathit{Sn}$ do not depend on the number of people tested.
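As a quick numerical check of these formulas, the snippet below evaluates the four outcome counts and the resulting share of positive results that are true positives. The values of $P$, $p$, $\mathit{Sn}$ and $\mathit{Sp}$ used here are illustrative assumptions only, not taken from any particular test.
<code>
# illustrative check of the outcome formulas (all values below are assumptions, not real test data)
P = 100_000   # number of people tested
p = 0.001     # prevalence: 1 infection per 1,000 people tested
Sn = 0.95     # assumed sensitivity
Sp = 0.95     # assumed specificity

TP = P*p*Sn            # true positives:  95.0
FP = P*(1-p)*(1-Sp)    # false positives: 4995.0
TN = P*(1-p)*Sp        # true negatives:  94905.0
FN = P*p*(1-Sn)        # false negatives: 5.0

print(TP, FP, TN, FN)
print(TP/(TP + FP))    # share of positives that are true positives, about 0.019
</code>
Even with both quality parameters at 95%, fewer than 2% of the positive results in this scenario belong to people who actually have the condition.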
_____no_output_____One of the interesting questions when test results are reported is "What percentage of the positive results are true positives?" This is a particularly important question for the COVID-19 pandemic because there are a lot of reports that most people with the virus are asymptomatic. Are they really asymptomatic, or just false positives?
The metric we're interested in here is the portion of positive results that are true positives, the positive predictive value, $\mathit{PPV}$:
$$\mathit{PPV} = \frac{\mathit{TP} }{ \mathit{TP} +\mathit{FP} } $$
Which expands to:
$$\mathit{PPV} = \frac{p\mathit{Sn} }{ p\mathit{Sn} + (1-p)(1-\mathit{Sp}) }\tag{2}\label{eq2} $$
It is important to note that $\mathit{PPV}$ is not dependent on $P$, the size of the population being tested. It depends only on the quality parameters of the test, $\mathit{Sn}$ and $\mathit{Sp}$, and the prevalence, $p$. For a given test, only the prevalence will change over time. _____no_output_____# Sensitivity and Specificity Values
It has been difficult to find specificity and sensitivity values for COVID-19 tests, or any rt-PCR tests; research papers rarely publish the values. However, there are a few reports of the values for serology tests, and a few reports of values for rt-PCR tests for the MERS-CoV virus.
We can get values for an antibody test for COVID-19 from a recently published paper, _Development and Clinical Application of A Rapid IgM-IgG Combined Antibody Test for SARS-CoV-2 Infection Diagnosis_<sup><a href="#fnote2" rel="noopener" target="_self">2</a></sup>, which reports:
> The overall testing sensitivity was 88.66% and specificity was 90.63%
This test is significantly different from the most common early tests for COVID-19; this test looks for antibodies in the patient's blood, while most COVID-19 tests are rt-PCR assays that look for fragments of RNA from the virus.
The article _MERS-CoV diagnosis: An update._<sup><a href="#fnote4" rel="noopener" target="_self">4</a></sup> reports that for MERS-CoV:
> Song et al. developed a rapid immunochromatographic assay for the detection of MERS-CoV nucleocapsid protein from camel nasal swabs with 93.9% sensitivity and 100% specificity compared to RT-rtPCR
The article _Performance Evaluation of the PowerChek MERS (upE & ORF1a) Real-Time PCR Kit for the Detection of Middle East Respiratory Syndrome Coronavirus RNA_<sup><a href="#fnote5" rel="noopener" target="_self">5</a></sup> reports:
> The diagnostic sensitivity and specificity of the PowerChek MERS assay were both 100% (95% confidence interval, 91.1–100%).
The [Emergency Use Authorization for LabCorp's rt-PCR test](https://www.fda.gov/media/136151/download)<sup><a href="#fnote6" rel="noopener" target="_self">6</a></sup> reports:
~~~
Performance of the COVID-19 RT-PCR test against the expected results [ with NP swabs ] are:
Positive Percent Agreement 40/40 = 100% (95% CI: 91.24%-100%)
Negative Percent Agreement 50/50 = 100% (95% CI: 92.87% -100%)
~~~
Using the lower bound of the 95% CI, values convert to a specificity of .90 and sensitivity of .94.
A recent report characterizes the Abbott Labs ID NOW system, used for influenza tests. [Abbott Labs received an EUA](https://www.fda.gov/media/136525/download), on 27 March 2020, for a version of the device for use with COVID-19. The study of the influenza version states:
> The sensitivities of ID NOW 2 for influenza A were 95.9% and 95.7% in NPS and NPA, respectively, and for influenza B were 100% and 98.7% in NPS and NPA, respectively. The specificity was 100% for both influenza A and influenza B in NPS and NPA.
The results section of the paper provides these parameters, when compared to rRT-PCR:
<table>
<tr>
<th>Virus</th>
<th>Parameter</th>
<th>ID NOW 2</th>
<th> ID NOW 2 VTM</th>
</tr>
<tr>
<td>type A</td>
<td>Sensitivity (95% CI)</td>
<td>95.7 (89.2-98.8)</td>
<td>96.7 (90.8-99.3)</td>
</tr>
<tr>
<td></td>
<td>Specificity (95% CI)</td>
<td>100 (89.3-100) </td>
<td>100 (89.3-100)</td>
</tr>
<tr>
<td>Type B</td>
<td>Sensitivity (95% CI)</td>
<td>98.7 (93.0-100)</td>
<td>100 (96.2-100)</td>
</tr>
<tr>
<td></td>
<td>Specificity (95% CI)</td>
<td>100 (98.5-100)</td>
<td>100 (98.5-100)</td>
</tr>
</table>
A recent Medscape article<sup><a href="#fnote7" rel="noopener" target="_self">7</a></sup> on the specificity and sensitivity of Influenza tests reports:
> In a study of the nucleic acid amplification tests ID Now (Abbott), Cobas Influenza A/B Assay (Roche Molecular Diagnostics), and Xpert Xpress Flu (Cepheid), Kanwar et al found the three products to have comparable sensitivities for influenza A (93.2%, 100%, 100%, respectively) and B (97.2%, 94.4%, 91.7%, respectively) detection. In addition, each product had greater than 97% specificity for influenza A and B detection.
> Rapid antigen tests generally have a sensitivity of 50-70% and a specificity of 90-95%. Limited studies have demonstrated very low sensitivity for detection of 2009 H1N1 with some commercial brands.
Based on these values, we'll explore the effects of sensitivity and specificities in the range of .9 to 1.
_____no_output_____# PPV For Serology Test
First we'll look at the positive predictive value for the antibody test in reference (<a href="#fnote2" rel="noopener" target="_self">2</a>), which has the lowest published Sp and Sn values at .9063 and .8866. The plot below shows the portion of positive test results that are true positives as a function of the prevalence.
_____no_output_____
<code>
def p_vs_tpr(Sp, Sn):
for p in np.power(10,np.linspace(-7,np.log10(.5), num=100)): # range from 1 per 10m to 50%
ppv = (p*Sn) / ( (p*Sn)+(1-p)*(1-Sp))
yield (p, ppv)
def plot_ppv(Sp, Sn):
df = pd.DataFrame(list(p_vs_tpr(Sp, Sn)), columns='p ppv'.split())
df.head()
fig, ax = plt.subplots(figsize=(12,8))
df.plot(ax=ax, x='p',y='ppv', figsize=(10,10))
fig.suptitle(f'Portion of Positives that Are True Vs Prevalence\nFor test with Sp={Sp} and Sn={Sn}', fontsize=20)
ax.set_xlabel('Condition Prevalence in Portion of Tested Population', fontsize=18)
ax.set_ylabel('Portion of Positive Test Results that are True Positives', fontsize=18);
#ax.set_xscale('log')
#ax.set_yscale('log')
plot_ppv(Sp = .9063, Sn = .8866)
_____no_output_____
</code>
The important implication of this curve is that using a test with low Sp and Sn values in conditions of low prevalence will result in a very large portion of false positives._____no_output_____# False Positives for LabCorp's test
Although the published results for the LabCorp test are 100% true positive and true negative rates, the 95% error margin is substantial, because the test was validated with a relatively small number of samples. This analysis will use the published error margins to produce a distribution of positive predictive values. First, let's look at the distributions of the true positive and true negative rates, accounting for the published confidence intervals. These distributions are generated by converting the published true and false rates, and their CIs, into Gaussian distributions, and selecting only values that are 1 or lower from those distributions.
_____no_output_____
<code>
# Convert CI to standard error. The values are reported for a one-sided 95% CI,
# so we're multiplying by the conversion for a two-sided 90% ci
p_se = (1-.9124) * 1.645
n_se = (1-.9287) * 1.645
def select_v(se):
"""get a distribution value, which must be less than or equal to 1"""
while True:
v = np.random.normal(1, se)
if v <= 1:
return v
# These values are not TP and FP counts; they are normalized to
# prevalence
TP = np.array(list(select_v(p_se) for _ in range(100_000)))
TN = np.array(list(select_v(n_se) for _ in range(100_000)))
fig, ax = plt.subplots(1,2, figsize=(12,8))
sns.distplot( TP, ax=ax[0], kde=False);
ax[0].set_title('Distribution of Possible True Positive Rates');
sns.distplot( TN, ax=ax[1], kde=False);
ax[1].set_title('Distribution of Possible True Negative Rates');
fig.suptitle(f'Distribution of True Positive and Negative Rates'
             '\nFor published confidence intervals and 100K random samples', fontsize=20);
_____no_output_____
</code>
It is important to note that these are not the distributions of test results themselves, but of the possible true positive and true negative rates.
From these distributions, we can calculate the distributions for the positive predictive value, the portion of all positive results that are true positives. _____no_output_____With these distributions, we can use ([Eq 2](#MathJax-Span-5239)) to compute the distributions of PPV for a variety of prevalences. In each chart, the 'mean' is the expectation value of the distribution, the weighted mean of the values. It is the most likely PPV value for the given prevalence. _____no_output_____
<code>
FP = 1-TN
FN = 1-TP
Sn = TP / (TP+FN)
Sp = TN / (TN+FP)
def ppv_dist_ufunc(p, Sp, Sn):
return (p*Sn) / ( (p*Sn)+(1-p)*(1-Sp))
def ppv_dist(p, Sp, Sn):
sp = np.random.choice(Sp, 1_000_000, replace=True)
sn = np.random.choice(Sn, 1_000_000, replace=True)
return ppv_dist_ufunc(p,sp, sn)
fig, axes = plt.subplots( 2,2, figsize=(15,15))
axes = axes.flat
def plot_axis(axn, prevalence):
ppvd = ppv_dist(prevalence, Sp, Sn)
wmean = (ppvd.sum()/len(ppvd)).round(4)
sns.distplot( ppvd, ax=axes[axn], kde=False);
axes[axn].set_title(f' prevalence = {prevalence}, mean={wmean}');
axes[axn].set_xlabel('Positive Prediction Value (PPV)')
axes[axn].set_ylabel('PPV Frequency')
plot_axis(0, .001)
plot_axis(1, .01)
plot_axis(2, .10)
plot_axis(3, .5)
fig.suptitle(f'Distribution of PPV Values for LabCorp Test\nBy condition prevalence', fontsize=20);
_____no_output_____
</code>
The implication of these charts is that, even for a test with published true positive and true negative rates of 100%, the uncertainties in the measurements can mean that there is still a substantial problem of false positives for low prevalences. _____no_output_____Computing the mean PPV value over a range of prevalence values results in the following relationship._____no_output_____
<code>
def ppv_vs_p():
for p in np.power(10,np.linspace(-7,np.log10(1), num=100)): # range from 1 per 10m to 50%
ppvd = ppv_dist(p, Sp, Sn)
yield p, ppvd.sum()/len(ppvd)
ppv_v_p = pd.DataFrame(list(ppv_vs_p()), columns='p ppv'.split())
fig, ax = plt.subplots(figsize=(8,8))
sns.lineplot(x='p', y='ppv', data=ppv_v_p, ax=ax)
ax.set_xlabel('Prevalence')
ax.set_ylabel('Positive Predictive Value')
fig.suptitle("Positive Predictive Value vs Prevalence\nFor LabCorp Test", fontsize=18);_____no_output_____
</code>
Compare this curve to the one presented earlier, for the antibody test with published sensitivity of 88.66% and specificity of 90.63%; the relationship between prevalence and PPV for the rt-PCR test isn't much better.
But what if the tests are really, really good: .99 for both sensitivity and specificity? Here is the curve for that case:
_____no_output_____
<code>
def ppv_vs_p():
for p in np.power(10,np.linspace(-7,np.log10(1), num=100)): # range from 1 per 10m to 50%
ppvd = ppv_dist_ufunc(p, .99, .99)
yield p, ppvd
ppv_v_p = pd.DataFrame(list(ppv_vs_p()), columns='p ppv'.split())
fig, ax = plt.subplots(figsize=(8,8))
sns.lineplot(x='p', y='ppv', data=ppv_v_p, ax=ax)
ax.set_xlabel('Prevalence')
ax.set_ylabel('Positive Predictive Value')
fig.suptitle("Positive Predictive Value vs Prevalence\nFor Sp=.99, Sn=.99", fontsize=18);_____no_output_____
</code>
This table shows the PPVs and false positive rates for a logarithmic range of prevalences. _____no_output_____
<code>
prevs = [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1]
names = ["1 per {}".format(round(1/p,0)) for p in prevs]
ppvs = [ppv_v_p.loc[(ppv_v_p.p-p).abs().idxmin()].ppv for p in prevs]
fp = [ str(round((1-ppv)*100,1))+"%" for ppv in ppvs]
df = pd.DataFrame({
'Rate': names,
'Prevalence': prevs,
'PPV': ppvs,
'False Positives Rate': fp
}).set_index('Prevalence')
df
_____no_output_____
</code>
This case is much better, across the range of prevalences, but for low prevalence, there are still a lot of false positives, and below 1 per 1000, it is nearly all false positives. Here is the same chart, but for Sp and Sn at 99.99%_____no_output_____
<code>
def ppv_vs_p():
for p in np.power(10,np.linspace(-7,np.log10(1), num=100)): # range from 1 per 10m to 50%
ppvd = ppv_dist_ufunc(p, .9999, .9999)
yield p, ppvd
ppv_v_p = pd.DataFrame(list(ppv_vs_p()), columns='p ppv'.split())
ppvs = [ppv_v_p.loc[(ppv_v_p.p-p).abs().idxmin()].ppv for p in prevs]
fp = [ str(round((1-ppv)*100,1))+"%" for ppv in ppvs]
df = pd.DataFrame({
'Rate': names,
'Prevalence': prevs,
'PPV': ppvs,
'False Positives Rate': fp
}).set_index('Prevalence')
df
_____no_output_____
</code>
Even a very accurate test will not be able to distinguish healthy from sick better than a coin flip if the prevalence is less than 1 per 10,000. _____no_output_____# Conclusion
Tests with less than 100% specificity and sensitivity, including those with published values of 100% but with a moderate confidence interval, are very sensitive to low condition prevalences. Considering the confidence intervals, to ensure that 50% of positive results are true positives requires a prevalence of about 10%, and 80% PPV requires about a 30% prevalence. This suggests that using rt-PCR tests to test a large population that has a low prevalence is likely to produce a large number of false positive results. _____no_output_____# References
* <a name="fnote1">1</a> Parikh, Rajul et al. “[Understanding and using sensitivity, specificity and predictive values.](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2636062/)” Indian journal of ophthalmology vol. 56,1 (2008): 45-50. doi:10.4103/0301-4738.37595
* <a name="fnote2">2</a> Li, Zhengtu et al. “[Development and Clinical Application of A Rapid IgM-IgG Combined Antibody Test for SARS-CoV-2 Infection Diagnosis.](https://pubmed.ncbi.nlm.nih.gov/32104917/)” Journal of medical virology, 10.1002/jmv.25727. 27 Feb. 2020, doi:10.1002/jmv.25727
* <a name="fnote3">3</a> Zhuang, G H et al. “[Potential False-Positive Rate Among the 'Asymptomatic Infected Individuals' in Close Contacts of COVID-19 Patients](https://pubmed.ncbi.nlm.nih.gov/32133832)” Zhonghua liuxingbingxue zazhi, vol. 41,4 485-488. 5 Mar. 2020, doi:10.3760/cma.j.cn112338-20200221-00144
* <a name="fnote4">4</a> Al Johani, Sameera, and Ali H Hajeer. “[MERS-CoV diagnosis: An update.](https://www.sciencedirect.com/science/article/pii/S1876034116300223)” Journal of infection and public health vol. 9,3 (2016): 216-9. doi:10.1016/j.jiph.2016.04.005
* <a name="fnote5">5</a> Huh, Hee Jae et al. “[Performance Evaluation of the PowerChek MERS (upE & ORF1a) Real-Time PCR Kit for the Detection of Middle East Respiratory Syndrome Coronavirus RNA.](http://www.annlabmed.org/journal/view.html?volume=37&number=6&spage=494)” Annals of laboratory medicine vol. 37,6 (2017): 494-498. doi:10.3343/alm.2017.37.6.494
* <a name="fnote7">7</a> [Emergency Use Authorization summary](https://www.fda.gov/media/136151/download) for LabCorp's COVID-19 rt-PCR test.
* Mitamura, Keiko et al. “[Clinical evaluation of ID NOW influenza A & B 2, a rapid influenza virus detection kit using isothermal nucleic acid amplification technology - A comparison with currently available tests.](https://pubmed.ncbi.nlm.nih.gov/31558351/?from_single_result=31558351)” Journal of infection and chemotherapy : official journal of the Japan Society of Chemotherapy vol. 26,2 (2020): 216-221. doi:10.1016/j.jiac.2019.08.015
* <a name="fnote7">8</a> Blanco, E. M. (2020, January 22). [What is the sensitivity and specificity of diagnostic influenza tests?](https://www.medscape.com/answers/2053517-197226/what-is-the-sensitivity-and-specificity-of-diagnostic-influenza-tests) Retrieved March 27, 2020, from https://www.medscape.com/answers/2053517-197226/what-is-the-sensitivity-and-specificity-of-diagnostic-influenza-tests
## Supporting Web Articles
The World Health Organization has a [web page with links to information the COVID-19 tests](https://www.who.int/emergencies/diseases/novel-coronavirus-2019/technical-guidance/laboratory-guidance) from many countries.
The CDC's page for [Rapid Diagnostic Testing for Influenza: Information for Clinical Laboratory Directors](https://www.cdc.gov/flu/professionals/diagnosis/rapidlab.htm) describes the minimum specificity and sensitivity for rapid influenza diagnostic tests, and shows some examples of PPV and flase positive rates.
Washington Post: [A ‘negative’ coronavirus test result doesn’t always mean you aren’t infected](https://www.washingtonpost.com/science/2020/03/26/negative-coronavirus-test-result-doesnt-always-mean-you-arent-infected/)
Prague Morning: [80% of Rapid COVID-19 Tests the Czech Republic Bought From China are Wrong](https://www.praguemorning.cz/80-of-rapid-covid-19-tests-the-czech-republic-bought-from-china-are-wrong/)
BusinessInsider: [Spain, Europe's worst-hit country after Italy, says coronavirus tests it bought from China are failing to detect positive cases](https://www.businessinsider.com/coronavirus-spain-says-rapid-tests-sent-from-china-missing-cases-2020-3?op=1)
Wikipedia has a good discussion of the false positives problem in the articl about the [Base Rate Falacy](https://en.wikipedia.org/wiki/Base_rate_fallacy#False_positive_paradox).
## Other References
The following references were referenced by Blanco<sup><a href="#fnote6" rel="noopener" target="_self">6</a></sup>, but I haven't evaluated them yet.
Kanwar N, Michael J, Doran K, Montgomery E, Selvarangan R. Comparison of the ID NOWTM Influenza A & B 2, Cobas® Influenza A/B, and Xpert® Xpress Flu Point-of-Care Nucleic Acid Amplification Tests for Influenza A/B Detection in Children. J Clin Microbiol. 2020 Jan 15.
Blyth CC, Iredell JR, Dwyer DE. Rapid-test sensitivity for novel swine-origin influenza A (H1N1) virus in humans. N Engl J Med. 2009 Dec 17. 361(25):2493.
Evaluation of rapid influenza diagnostic tests for detection of novel influenza A (H1N1) Virus - United States, 2009. MMWR Morb Mortal Wkly Rep. 2009 Aug 7. 58(30):826-9.
Faix DJ, Sherman SS, Waterman SH. Rapid-test sensitivity for novel swine-origin influenza A (H1N1) virus in humans. N Engl J Med. 2009 Aug 13. 361(7):728-9.
Ginocchio CC, Zhang F, Manji R, Arora S, Bornfreund M, Falk L. Evaluation of multiple test methods for the detection of the novel 2009 influenza A (H1N1) during the New York City outbreak. J Clin Virol. 2009 Jul. 45(3):191-5.
Sambol AR, Abdalhamid B, Lyden ER, Aden TA, Noel RK, Hinrichs SH. Use of rapid influenza diagnostic tests under field conditions as a screening tool during an outbreak of the 2009 novel influenza virus: practical considerations. J Clin Virol. 2010 Mar. 47(3):229-33.
_____no_output_____# Updates
* 2020-03-25: Changed conversion from CI to SE from 1.96 to 1.645; using the factor for a two sided 90% ci for the 95% one sided CI.
* 2020-03-27: Added parameters for Sp and Sn for the influenza version of Abbott Labs ID NOW device. _____no_output_____
| {
"repository": "sandiegodata/covid19",
"path": "Notebooks/WhyNotTesting.ipynb",
"matched_keywords": [
"RNA",
"virology"
],
"stars": 1,
"size": 245382,
"hexsha": "d0bcd8cd7f29b89ec5695bfc667b9559b60323c2",
"max_line_length": 63872,
"avg_line_length": 271.1403314917,
"alphanum_fraction": 0.9061870879
} |
# Notebook from Vedant1202/ML_SpamMessagesFilter
Path: NLP_Spam_MessagesG.ipynb
<code>
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import nltk
%matplotlib inline_____no_output_____nltk.download_shell()NLTK Downloader
---------------------------------------------------------------------------
d) Download l) List u) Update c) Config h) Help q) Quit
---------------------------------------------------------------------------
Downloader> l
Packages:
[ ] abc................. Australian Broadcasting Commission 2006
[ ] alpino.............. Alpino Dutch Treebank
[ ] averaged_perceptron_tagger Averaged Perceptron Tagger
[ ] averaged_perceptron_tagger_ru Averaged Perceptron Tagger (Russian)
[ ] basque_grammars..... Grammars for Basque
[ ] biocreative_ppi..... BioCreAtIvE (Critical Assessment of Information
Extraction Systems in Biology)
[ ] bllip_wsj_no_aux.... BLLIP Parser: WSJ Model
[ ] book_grammars....... Grammars from NLTK Book
[ ] brown............... Brown Corpus
[ ] brown_tei........... Brown Corpus (TEI XML Version)
[ ] cess_cat............ CESS-CAT Treebank
[ ] cess_esp............ CESS-ESP Treebank
[ ] chat80.............. Chat-80 Data Files
[ ] city_database....... City Database
[ ] cmudict............. The Carnegie Mellon Pronouncing Dictionary (0.6)
[ ] comparative_sentences Comparative Sentence Dataset
[ ] comtrans............ ComTrans Corpus Sample
[ ] conll2000........... CONLL 2000 Chunking Corpus
[ ] conll2002........... CONLL 2002 Named Entity Recognition Corpus
Hit Enter to continue:
[ ] conll2007........... Dependency Treebanks from CoNLL 2007 (Catalan
and Basque Subset)
[ ] crubadan............ Crubadan Corpus
[ ] dependency_treebank. Dependency Parsed Treebank
[ ] dolch............... Dolch Word List
[ ] europarl_raw........ Sample European Parliament Proceedings Parallel
Corpus
[ ] floresta............ Portuguese Treebank
[ ] framenet_v15........ FrameNet 1.5
[ ] framenet_v17........ FrameNet 1.7
[ ] gazetteers.......... Gazeteer Lists
[ ] genesis............. Genesis Corpus
[ ] gutenberg........... Project Gutenberg Selections
[ ] ieer................ NIST IE-ER DATA SAMPLE
[ ] inaugural........... C-Span Inaugural Address Corpus
[ ] indian.............. Indian Language POS-Tagged Corpus
[ ] jeita............... JEITA Public Morphologically Tagged Corpus (in
ChaSen format)
[ ] kimmo............... PC-KIMMO Data Files
[ ] knbc................ KNB Corpus (Annotated blog corpus)
[ ] large_grammars...... Large context-free and feature-based grammars
for parser comparison
Hit Enter to continue:
[ ] lin_thesaurus....... Lin's Dependency Thesaurus
[ ] mac_morpho.......... MAC-MORPHO: Brazilian Portuguese news text with
part-of-speech tags
[ ] machado............. Machado de Assis -- Obra Completa
[ ] masc_tagged......... MASC Tagged Corpus
[ ] maxent_ne_chunker... ACE Named Entity Chunker (Maximum entropy)
[ ] maxent_treebank_pos_tagger Treebank Part of Speech Tagger (Maximum entropy)
[ ] moses_sample........ Moses Sample Models
[ ] movie_reviews....... Sentiment Polarity Dataset Version 2.0
[ ] mte_teip5........... MULTEXT-East 1984 annotated corpus 4.0
[ ] mwa_ppdb............ The monolingual word aligner (Sultan et al.
2015) subset of the Paraphrase Database.
[ ] names............... Names Corpus, Version 1.3 (1994-03-29)
[ ] nombank.1.0......... NomBank Corpus 1.0
[ ] nonbreaking_prefixes Non-Breaking Prefixes (Moses Decoder)
[ ] nps_chat............ NPS Chat
[ ] omw................. Open Multilingual Wordnet
[ ] opinion_lexicon..... Opinion Lexicon
[ ] panlex_swadesh...... PanLex Swadesh Corpora
[ ] paradigms........... Paradigm Corpus
[ ] pe08................ Cross-Framework and Cross-Domain Parser
Evaluation Shared Task
Hit Enter to continue:
[ ] perluniprops........ perluniprops: Index of Unicode Version 7.0.0
character properties in Perl
[ ] pil................. The Patient Information Leaflet (PIL) Corpus
[ ] pl196x.............. Polish language of the XX century sixties
[ ] porter_test......... Porter Stemmer Test Files
[ ] ppattach............ Prepositional Phrase Attachment Corpus
[ ] problem_reports..... Problem Report Corpus
[ ] product_reviews_1... Product Reviews (5 Products)
[ ] product_reviews_2... Product Reviews (9 Products)
[ ] propbank............ Proposition Bank Corpus 1.0
[ ] pros_cons........... Pros and Cons
[ ] ptb................. Penn Treebank
[ ] punkt............... Punkt Tokenizer Models
[ ] qc.................. Experimental Data for Question Classification
[ ] reuters............. The Reuters-21578 benchmark corpus, ApteMod
version
[ ] rslp................ RSLP Stemmer (Removedor de Sufixos da Lingua
Portuguesa)
[ ] rte................. PASCAL RTE Challenges 1, 2, and 3
[ ] sample_grammars..... Sample Grammars
[ ] semcor.............. SemCor 3.0
Hit Enter to continue:
[ ] senseval............ SENSEVAL 2 Corpus: Sense Tagged Text
[ ] sentence_polarity... Sentence Polarity Dataset v1.0
[ ] sentiwordnet........ SentiWordNet
[ ] shakespeare......... Shakespeare XML Corpus Sample
[ ] sinica_treebank..... Sinica Treebank Corpus Sample
[ ] smultron............ SMULTRON Corpus Sample
[ ] snowball_data....... Snowball Data
[ ] spanish_grammars.... Grammars for Spanish
[ ] state_union......... C-Span State of the Union Address Corpus
[*] stopwords........... Stopwords Corpus
[ ] subjectivity........ Subjectivity Dataset v1.0
[ ] swadesh............. Swadesh Wordlists
[ ] switchboard......... Switchboard Corpus Sample
[ ] tagsets............. Help on Tagsets
[ ] timit............... TIMIT Corpus Sample
[ ] toolbox............. Toolbox Sample Files
[ ] treebank............ Penn Treebank Sample
[ ] twitter_samples..... Twitter Samples
[ ] udhr2............... Universal Declaration of Human Rights Corpus
(Unicode Version)
[ ] udhr................ Universal Declaration of Human Rights Corpus
Hit Enter to continue:
[ ] unicode_samples..... Unicode Samples
[ ] universal_tagset.... Mappings to the Universal Part-of-Speech Tagset
[ ] universal_treebanks_v20 Universal Treebanks Version 2.0
[ ] vader_lexicon....... VADER Sentiment Lexicon
[ ] verbnet............. VerbNet Lexicon, Version 2.1
[ ] webtext............. Web Text Corpus
[ ] wmt15_eval.......... Evaluation data from WMT15
[ ] word2vec_sample..... Word2Vec Sample
[ ] wordnet............. WordNet
[ ] wordnet_ic.......... WordNet-InfoContent
[ ] words............... Word Lists
[ ] ycoe................ York-Toronto-Helsinki Parsed Corpus of Old
English Prose
Collections:
[P] all-corpora......... All the corpora
[P] all-nltk............ All packages available on nltk_data gh-pages
branch
[P] all................. All packages
[P] book................ Everything used in the NLTK Book
[P] popular............. Popular packages
[ ] tests............... Packages for running tests
Hit Enter to continue:
[ ] third-party......... Third-party data packages
([*] marks installed packages; [P] marks partially installed collections)
---------------------------------------------------------------------------
d) Download l) List u) Update c) Config h) Help q) Quit
---------------------------------------------------------------------------
Downloader>
messages = [line.rstrip() for line in open('SMSSpamCollection')] ## Put in your dataset here_____no_output_____len(messages)_____no_output_____messages[50]_____no_output_____for msg_no, message in enumerate(messages[:10]):
print(msg_no, message)
print('\n')0 ham Go until jurong point, crazy.. Available only in bugis n great world la e buffet... Cine there got amore wat...
1 ham Ok lar... Joking wif u oni...
2 spam Free entry in 2 a wkly comp to win FA Cup final tkts 21st May 2005. Text FA to 87121 to receive entry question(std txt rate)T&C's apply 08452810075over18's
3 ham U dun say so early hor... U c already then say...
4 ham Nah I don't think he goes to usf, he lives around here though
5 spam FreeMsg Hey there darling it's been 3 week's now and no word back! I'd like some fun you up for it still? Tb ok! XxX std chgs to send, £1.50 to rcv
6 ham Even my brother is not like to speak with me. They treat me like aids patent.
7 ham As per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune
8 spam WINNER!! As a valued network customer you have been selected to receivea £900 prize reward! To claim call 09061701461. Claim code KL341. Valid 12 hours only.
9 spam Had your mobile 11 months or more? U R entitled to Update to the latest colour mobiles with camera for Free! Call The Mobile Update Co FREE on 08002986030
messages = pd.read_csv('SMSSpamCollection', sep='\t', names=['Label', 'Message'])
messages.head()_____no_output_____messages.describe()_____no_output_____messages.groupby('Label').describe()_____no_output_____messages['Length'] = messages['Message'].apply(len)_____no_output_____messages.head()_____no_output_____plt.figure(figsize=(16,12))
sns.distplot(messages['Length'], bins=100, kde=False, color='black')C:\Users\VEDANT NANDOSKAR\Anaconda3\lib\site-packages\matplotlib\axes\_axes.py:6462: UserWarning: The 'normed' kwarg is deprecated, and has been replaced by the 'density' kwarg.
warnings.warn("The 'normed' kwarg is deprecated, and has been "
messages['Length'].describe()_____no_output_____messages[messages['Length'] == 910]['Message'].iloc[0]_____no_output_____messages.hist(column='Length', by='Label', bins=100, figsize=(16,8))_____no_output_____import string
from nltk.corpus import stopwords_____no_output_____def split_intoWords(msg):
## Firstly remove punctuation
noPunc = [char for char in msg if char not in string.punctuation]
## Then join the sepearate characters in a list
noPunc = ''.join(noPunc)
## Finally return only the significant words
return [word for word in noPunc.split() if word.lower() not in stopwords.words('english')]
_____no_output_____messages['Message'].head(5).apply(split_intoWords)_____no_output_____from sklearn.feature_extraction.text import CountVectorizer_____no_output_____bow_transform = CountVectorizer(analyzer=split_intoWords).fit(messages['Message'])_____no_output_____print(len(bow_transform.vocabulary_))11425
messages_bow = bow_transform.transform(messages['Message'])_____no_output_____print('Shape of matrix: ',messages_bow.shape)
print('Non zero occurences: ',messages_bow.nnz)Shape of matrix: (5572, 11425)
Non zero occurences: 50548
sparsity = (100.0 * messages_bow.nnz / (messages_bow.shape[0] * messages_bow.shape[1]))
print('sparsity: {}'.format(sparsity))sparsity: 0.07940295412668218
from sklearn.feature_extraction.text import TfidfTransformer_____no_output_____tfidf_transform = TfidfTransformer().fit(messages_bow)_____no_output_____messages_tfidf = tfidf_transform.transform(messages_bow)_____no_output_____from sklearn.naive_bayes import MultinomialNB_____no_output_____spam_detect_model = MultinomialNB().fit(messages_tfidf, messages['Label'])_____no_output_____predictions = spam_detect_model.predict(messages_tfidf)_____no_output_____predictions_____no_output_____from sklearn.metrics import confusion_matrix, classification_report_____no_output_____confusion_matrix(messages['Label'], predictions)_____no_output_____print(classification_report(messages['Label'], predictions)) precision recall f1-score support
ham 0.98 1.00 0.99 4825
spam 1.00 0.85 0.92 747
avg / total 0.98 0.98 0.98 5572
from sklearn.pipeline import Pipeline_____no_output_____from sklearn.ensemble import RandomForestClassifier_____no_output_____ pipelineRf = Pipeline([
('bow', CountVectorizer(analyzer=split_intoWords)),
('tfidf', TfidfTransformer()),
('classifier', RandomForestClassifier())
])_____no_output_____
</code>
# Now comparing with Random Forest Classifier instead of MultinomialNB.
# Stop here if you don't want to do the steps ahead
# Skip to 'save to csv' step_____no_output_____
<code>
pipelineRf.fit(messages['Message'], messages['Label'])_____no_output_____predictionsRf = pipelineRf.predict(messages['Message'])_____no_output_____confusion_matrix(messages['Label'], predictionsRf)_____no_output_____print(classification_report(messages['Label'], predictionsRf)) precision recall f1-score support
ham 1.00 1.00 1.00 4825
spam 1.00 0.98 0.99 747
avg / total 1.00 1.00 1.00 5572
predictionsRf_____no_output_____predictionsDf = pd.DataFrame(predictions, columns=['Naive Bayes Prediction'])_____no_output_____predictionsDf.head()_____no_output_____predictionsRfDf = pd.DataFrame(predictionsRf, columns=['Random Forest Predictions'])
predictionsRfDf.head()_____no_output_____messagesPred = pd.concat([messages, predictionsDf, predictionsRfDf], axis=1)_____no_output_____messagesPred_____no_output_____messagesPred.to_csv('predictions_spamOrHam_messages.csv', header=True, index_label='Index')_____no_output_____
</code>
| {
"repository": "Vedant1202/ML_SpamMessagesFilter",
"path": "NLP_Spam_MessagesG.ipynb",
"matched_keywords": [
"biology"
],
"stars": null,
"size": 98865,
"hexsha": "d0be082b6d7a15f66b2ac5fa0ad2896c4f105f82",
"max_line_length": 13164,
"avg_line_length": 46.8332543818,
"alphanum_fraction": 0.5467051029
} |
# Notebook from reggiebernardo/notebooks
Path: supp_ntbks_arxiv.2106.08688/gp_ga_reconstruction.ipynb
## Gaussian processes with genetic algorithm for the reconstruction of late-time Hubble data_____no_output_____This notebook uses Gaussian processes (GP) with the genetic algorithm (GA) to reconstruct the cosmic chronometers and supernovae data sets ([2106.08688](https://arxiv.org/abs/2106.08688)). We shall construct our own GP class and use it with the python package ``pygad`` (https://pygad.readthedocs.io/) for the GA.
References to the data can be found at the end of the notebook._____no_output_____
<code>
%matplotlib inline
import numpy as np
from numpy.random import uniform as unif
import matplotlib.pyplot as plt
import pygad_____no_output_____
</code>
### 0. My GP class_____no_output_____Here is the GP class (written from scratch) that we shall use in this notebook._____no_output_____
<code>
class GP:
'''Class for making GP predictions.
rbf: k(r) = A^2 \exp(-r^2/(2l^2))
rq : k(r) = A^2 (1 + (r^2/(2 \alpha l^2)))^{-\alpha}
m52: k(r) = A^2 \exp(-\sqrt{5}r/l)
(1 + \sqrt{5}r/l + 5r^2/(3l^2))
    mix: rbf + rq + m52
Input:
chromosome: list of kernel hyperparameters
'''
def __init__(self, chromosome):
self.C_rbf = chromosome[0] # rbf genes
self.l_rbf = chromosome[1]
self.n_rbf = chromosome[2]
self.C_rq = chromosome[3] # rq genes
self.l_rq = chromosome[4]
self.a_rq = chromosome[5]
self.n_rq = chromosome[6]
self.C_m52 = chromosome[7] # m52 genes
self.l_m52 = chromosome[8]
self.n_m52 = chromosome[9]
def kernel(self, x, y):
r = x - y
# rbf term
k_rbf = np.exp(-(r**2)/(2*(self.l_rbf**2)))
rbf_term = (self.C_rbf**2)*(k_rbf**self.n_rbf)
# rq term
r = x - y
R_sq = (r**2)/(2*(self.l_rq**2))
k_rq = 1/((1 + R_sq/self.a_rq)**self.a_rq)
rq_term = (self.C_rq**2)*(k_rq**self.n_rq)
# m52 term
X = np.sqrt(5)*np.abs(r)/self.l_m52
B = 1 + X + ((X**2)/3)
k_m52 = B*np.exp(-X)
m52_term = (self.C_m52**2)*(k_m52**self.n_m52)
return rbf_term + rq_term + m52_term
def k_plus_c_inv(self, Z, C):
k_ZZ = np.array([[self.kernel(z_i, z_j) \
for z_i in Z]
for z_j in Z])
return np.linalg.inv(k_ZZ + C)
def cov(self, Z, C, Zs):
'''Returns the covariance matrix at Zs.
Note: Zs must be an array.'''
kpc_inv = self.k_plus_c_inv(Z, C)
return np.array([[self.kernel(z_i, z_j) \
-(self.kernel(z_i, Z) @ \
kpc_inv @ \
self.kernel(Z, z_j)) \
for z_i in Zs] \
for z_j in Zs])
def var(self, Z, C, Zs):
'''Returns the variance at Zs.
Note: Zs must be an array.'''
kpc_inv = self.k_plus_c_inv(Z, C)
return np.array([self.kernel(zs, zs) \
-(self.kernel(zs, Z) @ \
kpc_inv @ \
self.kernel(Z, zs)) \
for zs in Zs])
def get_logmlike(self, Z, Y, C):
'''Returns the log-marginal likelihood.'''
kpc_inv = self.k_plus_c_inv(Z, C)
kpc = np.linalg.inv(kpc_inv)
kpc_det = np.linalg.det(kpc)
Ys = np.array([(self.kernel(zs, Z) @ kpc_inv \
@ Y) for zs in Z])
delta_y = Y
return -0.5*(delta_y @ kpc_inv @ delta_y) \
-0.5*np.log(kpc_det) \
-0.5*len(Z)*np.log(2*np.pi)
def predict(self, Z, Y, C, Zs, with_cov = False, \
k_as_cov = False):
kpc_inv = self.k_plus_c_inv(Z, C)
mean = np.array([(self.kernel(zs, Z) @ kpc_inv \
@ Y) for zs in Zs])
if with_cov == False:
var_zz = self.var(Z, C, Zs)
return {'z': Zs, 'Y': mean, \
'varY': var_zz}
elif (with_cov == True) and (k_as_cov == False):
cov_zz = self.cov(Z, C, Zs)
return {'z': Zs, 'Y': mean, \
'covY': cov_zz}
elif (with_cov == True) and (k_as_cov == True):
cov_zz = np.array([[self.kernel(z_i, z_j) \
for z_i in Zs] \
for z_j in Zs])
return {'z': Zs, 'Y': mean, \
'covY': cov_zz}_____no_output_____
</code>
This will be used for both the cosmic chronometers (Section 1) and supernovae applications (Section 2)._____no_output_____### 1. Cosmic chronometers_____no_output_____Importing the cosmic chronometers data set._____no_output_____
<code>
cc_data = np.loadtxt('cc_data.txt')
z_cc = cc_data[:, 0]
Hz_cc = cc_data[:, 1]
sigHz_cc = cc_data[:, 2]
fig, ax = plt.subplots()
ax.errorbar(z_cc, Hz_cc, yerr = sigHz_cc,
fmt = 'ro', ecolor = 'k',
markersize = 7, capsize = 3)
ax.set_xlabel('$z$')
ax.set_ylabel('$H(z)$')
plt.show()_____no_output_____
</code>
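Before turning to the GA, here is a minimal usage sketch of the GP class defined above, applied to the cosmic chronometer arrays that were just loaded. The chromosome below is an arbitrary, hand-picked assumption rather than an optimized set of hyperparameters; it is only meant to illustrate the call signatures of `get_logmlike` and `predict` that the fitness function and the reconstruction rely on.
<code>
# minimal sketch (not part of the original analysis): call the GP class directly on the CC data
# the chromosome below is an arbitrary assumption, chosen only to illustrate the interface
test_chromosome = [150, 2, 1,    # rbf genes: C, l, n
                   0, 1, 1, 1,   # rq genes:  C, l, alpha, n (switched off by setting C = 0)
                   0, 1, 1]      # m52 genes: C, l, n (switched off by setting C = 0)
gp_test = GP(test_chromosome)
print(gp_test.get_logmlike(z_cc, Hz_cc, np.diag(sigHz_cc**2)))
rec_test = gp_test.predict(z_cc, Hz_cc, np.diag(sigHz_cc**2), np.linspace(0, 3, 10))
print(rec_test['Y'])              # reconstructed H(z) at the requested redshifts
print(np.sqrt(rec_test['varY']))  # corresponding 1-sigma uncertainties
</code>
The GA in the rest of the notebook simply searches over these ten hyperparameters so that the log-marginal likelihood, minus the complexity penalty defined next, is maximized.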
To use the GA, we set up the log-marginal likelihood as a fitness function. In addition, we consider a Bayesian-information type penalty to penalize overly complex kernels._____no_output_____
<code>
n_data = len(z_cc)
def penalty(chromosome):
'''Identifies a penalty term to be factored in the fitness function
so that longer/more complex kernels will be given a due weight.'''
c_rbf = chromosome[0]
l_rbf = chromosome[1]
A_rbf = c_rbf*l_rbf
c_rq = chromosome[3]
l_rq = chromosome[4]
A_rq = c_rq*l_rq
c_m52 = chromosome[7]
l_m52 = chromosome[8]
A_m52 = c_m52*l_m52
# set threshold to A_X = c_x*l_x
A_th = 1e-3
k = 0
if A_rbf > A_th:
k += 3
if A_rq > A_th:
k += 4
if A_m52 > A_th:
k += 3
return k*np.log(n_data)/2
def get_fit(chromosome):
    '''Evaluates the fitness of the individual with chromosome'''
if all(hp > 0 for hp in chromosome) == True:
pnl = penalty(chromosome)
try:
gp = GP(chromosome)
lml = gp.get_logmlike(z_cc, Hz_cc,
np.diag(sigHz_cc**2))
return lml - pnl
except:
lml = -1000
return lml
else:
lml = -1000
return lml
def fitness_function(chromosome, chromosome_idx):
return get_fit(chromosome)_____no_output_____
</code>
In the next lines, we set up both a uniform population of pure-bred kernels, split equally among the three kernel types, and a diverse population of mixed kernels. It is interesting to see the evolution of the uniform population compared to one which is a lot more diverse._____no_output_____
<code>
pop_size = 1000 # population size
init_uni = []
for i in range(0, pop_size):
if i < int(pop_size/3):
init_uni.append([unif(0, 300), unif(0, 10), unif(0, 5),
0, 0, 0, 0, 0, 0, 0])
elif (i > int(pop_size/3)) and (i < int(2*pop_size/3)):
init_uni.append([0, 0, 0,
unif(0, 300), unif(0, 10), unif(0, 2), unif(0, 5),
0, 0, 0])
else:
init_uni.append([0, 0, 0, 0, 0, 0, 0,
unif(0, 300), unif(0, 10), unif(0, 5)])
init_uni = np.array(init_uni)
init_div = []
for i in range(0, pop_size):
init_div.append([unif(0, 300), unif(0, 10), unif(0, 5),
unif(0, 300), unif(0, 10), unif(0, 2), unif(0, 5),
unif(0, 300), unif(0, 10), unif(0, 5)])
init_div = np.array(init_div)_____no_output_____
</code>
Given this, we prepare the parameters of the GA._____no_output_____
<code>
gene_space = [{'low': 0, 'high': 300}, {'low': 0, 'high': 10}, {'low': 0, 'high': 5}, # rbf lims
              {'low': 0, 'high': 300}, {'low': 0, 'high': 10}, # rq lims
{'low': 0, 'high': 2}, {'low': 0, 'high': 5},
{'low': 0, 'high': 300}, {'low': 0, 'high': 10}, {'low': 0, 'high': 5}] # m52 lims
num_genes = 10 # length of chromosome
n_gen = 100 # number of generations
sel_rate = 0.3 # selection rate
# parent selection
parent_selection_type = "rws" # roulette wheel selection
keep_parents = int(sel_rate*pop_size)
num_parents_mating = int(sel_rate*pop_size)
# crossover
#crossover_type = "single_point"
#crossover_type = "two_points"
#crossover_type = "uniform"
crossover_type = "scattered"
crossover_prob = 1.0
# mutation type options: random, swap, inversion, scramble, adaptive
mutation_type = "random"
#mutation_type = "swap"
#mutation_type = "inversion"
#mutation_type = "scramble"
#mutation_type = "adaptive"
mutation_prob = 0.5
def callback_generation(ga_instance):
i_gen = ga_instance.generations_completed
if i_gen in [i for i in range(0, n_gen, int(n_gen*0.1))]:
last_best = ga_instance.best_solutions[-1]
print("generation = {generation}".format(generation = i_gen))
print("fitness = {fitness}".format(fitness = get_fit(last_best)))_____no_output_____
</code>
The ``GA run`` is performed in the next line.
*The next two code runs may be skipped if the output has already been saved. In this case, proceed to the loading lines._____no_output_____
<code>
# setup GA instance, for random initial pop.
ga_inst_uni_cc = pygad.GA(initial_population = init_uni,
num_genes = num_genes,
num_generations = n_gen,
num_parents_mating = num_parents_mating,
fitness_func = fitness_function,
parent_selection_type = parent_selection_type,
keep_parents = keep_parents,
crossover_type = crossover_type,
crossover_probability = crossover_prob,
mutation_type = mutation_type,
mutation_probability = mutation_prob,
mutation_by_replacement = True,
on_generation = callback_generation,
gene_space = gene_space,
save_best_solutions = True)
# perform GA run
ga_inst_uni_cc.run()
# save results
ga_inst_uni_cc.save('gp_ga_cc_uniform_init')
# best solution
solution = ga_inst_uni_cc.best_solutions[-1]
print("best chromosome: {solution}".format(solution = solution))
print("best fitness = {solution_fitness}".format(solution_fitness = \
get_fit(solution)))_____no_output_____
</code>
The next run creates a GA instance with the same parameters as the previous run, but with a diversified initial population._____no_output_____
<code>
ga_inst_div_cc = pygad.GA(initial_population = init_div,
num_genes = num_genes,
num_generations = n_gen,
num_parents_mating = num_parents_mating,
fitness_func = fitness_function,
parent_selection_type = parent_selection_type,
keep_parents = keep_parents,
crossover_type = crossover_type,
crossover_probability = crossover_prob,
mutation_type = mutation_type,
mutation_probability = mutation_prob,
mutation_by_replacement = True,
on_generation = callback_generation,
gene_space = gene_space,
save_best_solutions = True)
# perform GA run
ga_inst_div_cc.run()
# save results
ga_inst_div_cc.save('gp_ga_cc_diverse_init')
# best solution
solution = ga_inst_div_cc.best_solutions[-1]
print("best chromosome: {solution}".format(solution = solution))
print("best fitness = {solution_fitness}".format(solution_fitness = \
get_fit(solution)))_____no_output_____
</code>
``Loading lines``
We can load the ``pygad`` results should they have been saved already in previous runs._____no_output_____
<code>
load_ga_uniform = pygad.load('gp_ga_cc_uniform_init')
load_ga_diverse = pygad.load('gp_ga_cc_diverse_init')_____no_output_____
</code>
We can view the predictions based on these champion individuals below._____no_output_____
<code>
# champion chromosomes
chr_1 = load_ga_uniform.best_solutions[-1]
chr_2 = load_ga_diverse.best_solutions[-1]
z_min = 0
z_max = 3
n_div = 1000
z_rec = np.linspace(z_min, z_max, n_div)
champs = {}
champs['uniform'] = {'chromosome': chr_1}
champs['diverse'] = {'chromosome': chr_2}
for champ in champs:
chromosome = champs[champ]['chromosome']
gp = GP(chromosome)
rec = gp.predict(z_cc, Hz_cc, np.diag(sigHz_cc**2),
z_rec)
Hz_rec, sigHz_rec = rec['Y'], np.sqrt(rec['varY'])
H0 = Hz_rec[0]
sigH0 = sigHz_rec[0]
# compute chi2
Hz = gp.predict(z_cc, Hz_cc, np.diag(sigHz_cc**2),
z_cc)['Y']
chi2 = np.sum(((Hz - Hz_cc)/sigHz_cc)**2)
# print GA measures
print(champ)
print('H0 =', np.round(H0, 1), '+/-', np.round(sigH0, 1))
print('log-marginal likelihood',
gp.get_logmlike(z_cc, Hz_cc, np.diag(sigHz_cc**2)))
print('penalty', penalty(chromosome))
print('fitness function', get_fit(chromosome))
print('chi^2', chi2)
print()
champs[champ]['z'] = z_rec
champs[champ]['Hz'] = Hz_rec
champs[champ]['sigHz'] = sigHz_rec
# plot champs' predictions
fig, ax = plt.subplots()
ax.errorbar(z_cc, Hz_cc, yerr = sigHz_cc,
fmt = 'kx', ecolor = 'k',
elinewidth = 1, capsize = 2, label = 'CC')
# color, line style, and hatch list
clst = ['b', 'r']
llst = ['-', '--']
hlst = ['|', '-']
for champ in champs:
i = list(champs.keys()).index(champ)
Hz_rec = champs[champ]['Hz']
sigHz_rec = champs[champ]['sigHz']
ax.plot(z_rec, Hz_rec, clst[i] + llst[i],
label = champ)
ax.fill_between(z_rec,
Hz_rec - 2*sigHz_rec,
Hz_rec + 2*sigHz_rec,
facecolor = clst[i], alpha = 0.2,
edgecolor = clst[i], hatch = hlst[i])
ax.set_xlabel('$z$')
ax.set_xlim(z_min, z_max)
ax.set_ylim(1, 370)
ax.set_ylabel('$H(z)$')
ax.legend(loc = 'upper left', prop = {'size': 9.5})
plt.show()uniform
H0 = 69.7 +/- 6.3
log-marginal likelihood -123.7946112476086
penalty 16.83647914993237
fitness function -140.63109039754096
chi^2 12.241263910472332
diverse
H0 = 67.3 +/- 5.9
log-marginal likelihood -123.76904376987132
penalty 16.83647914993237
fitness function -140.6055229198037
chi^2 12.30756281659826
</code>
A plot of the generation vs fitness can also be shown._____no_output_____
<code>
fit_uni = [get_fit(c) for c in load_ga_uniform.best_solutions]
fit_div = [get_fit(c) for c in load_ga_diverse.best_solutions]
fig, ax = plt.subplots()
ax.plot(fit_uni, 'b-', label = 'uniform')
ax.plot(fit_div, 'r--', label = 'diverse')
ax.set_xlabel('generation')
ax.set_ylabel('best fitness')
ax.set_xlim(1, n_gen)
ax.set_ylim(-141.0, -140.5)
ax.legend(loc = 'lower right', prop = {'size': 9.5})
plt.show()_____no_output_____
</code>
### 2. Supernovae Type Ia_____no_output_____In this section, we perform the GP reconstruction with the compressed Pantheon data set._____no_output_____
<code>
# load pantheon compressed m(z) data
loc_lcparam = 'lcparam_DS17f.txt'
loc_lcparam_sys = 'sys_DS17f.txt'
lcparam = np.loadtxt(loc_lcparam, usecols = (1, 4, 5))
lcparam_sys = np.loadtxt(loc_lcparam_sys, skiprows = 1)
# setup pantheon samples
z_ps = lcparam[:, 0]
logz_ps = np.log(z_ps)
mz_ps = lcparam[:, 1]
sigmz_ps = lcparam[:, 2]
# pantheon samples systematics
covmz_ps_sys = lcparam_sys.reshape(40, 40)
covmz_ps_tot = covmz_ps_sys + np.diag(sigmz_ps**2)
# plot data set
plt.errorbar(logz_ps, mz_ps,
yerr = np.sqrt(np.diag(covmz_ps_tot)),
fmt = 'kx', markersize = 4,
ecolor = 'red', elinewidth = 2, capsize = 2)
plt.xlabel('$\ln(z)$')
plt.ylabel('$m(z)$')
plt.show()_____no_output_____
</code>
The fitness function, now taking in the SNe data set, is prepared below for the GA._____no_output_____
<code>
n_data = len(z_ps)
def get_fit(chromosome):
'''Evaluates the fitness of the individual with the given chromosome'''
if all(hp > 0 for hp in chromosome):
pnl = penalty(chromosome)
try:
gp = GP(chromosome)
lml = gp.get_logmlike(logz_ps, mz_ps,
covmz_ps_tot)
if not np.isnan(lml):
return lml - pnl
else:
return -1000
except Exception:
lml = -1000
return lml
else:
lml = -1000
return lml
def fitness_function(chromosome, chromosome_idx):
return get_fit(chromosome)_____no_output_____
</code>
Then, we set up the initial uniform and diverse kernel populations._____no_output_____
<code>
pop_size = 1000 # population size
init_uni = []
for i in range(0, pop_size):
if i < int(pop_size/3):
init_uni.append([unif(0, 200), unif(0, 100), unif(0, 5),
0, 0, 0, 0, 0, 0, 0])
elif (i >= int(pop_size/3)) and (i < int(2*pop_size/3)):
init_uni.append([0, 0, 0,
unif(0, 200), unif(0, 100), unif(0, 2), unif(0, 5),
0, 0, 0])
else:
init_uni.append([0, 0, 0, 0, 0, 0, 0,
unif(0, 200), unif(0, 100), unif(0, 5)])
init_uni = np.array(init_uni)
init_div = []
for i in range(0, pop_size):
init_div.append([unif(0, 200), unif(0, 100), unif(0, 5),
unif(0, 200), unif(0, 100), unif(0, 2), unif(0, 5),
unif(0, 200), unif(0, 100), unif(0, 5)])
init_div = np.array(init_div)_____no_output_____
</code>
The GA parameters can now be set for the SNe fitting._____no_output_____
<code>
gene_space = [{'low': 0, 'high': 200}, {'low': 0, 'high': 100}, {'low': 0, 'high': 5}, # rbf lims
{'low': 0, 'high': 200}, {'low': 0, 'high': 100}, # chy lims
{'low': 0, 'high': 2}, {'low': 0, 'high': 5},
{'low': 0, 'high': 200}, {'low': 0, 'high': 100}, {'low': 0, 'high': 5}] # m52 lims
num_genes = 10 # length of chromosome
n_gen = 100 # number of generations
sel_rate = 0.3 # selection rate
# parent selection
parent_selection_type = "rws" # roulette wheel selection
keep_parents = int(sel_rate*pop_size)
num_parents_mating = int(sel_rate*pop_size)
# crossover
#crossover_type = "single_point"
#crossover_type = "two_points"
#crossover_type = "uniform"
crossover_type = "scattered"
crossover_prob = 1.0
# mutation type options: random, swap, inversion, scramble, adaptive
mutation_type = "random"
#mutation_type = "swap"
#mutation_type = "inversion"
#mutation_type = "scramble"
#mutation_type = "adaptive"
mutation_prob = 0.5_____no_output_____
</code>
Here are the ``GA runs``. We start with the uniform population.
*Skip the runs and jump ahead to the loading cells if the results have already been prepared.*_____no_output_____
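A minimal guard for that (a sketch only; it assumes ``pygad``'s ``save``/``load`` use a ``.pkl`` file with the names given below):_____no_output_____
<code>
import os
# hypothetical check: skip retraining when both saved GA instances already exist
have_saved_runs = os.path.exists('gp_ga_sn_uniform_init.pkl') and os.path.exists('gp_ga_sn_diverse_init.pkl')
print('saved GA runs found:', have_saved_runs)_____no_output_____
</code>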
<code>
ga_inst_uni_sn = pygad.GA(initial_population = init_uni,
num_genes = num_genes,
num_generations = n_gen,
num_parents_mating = num_parents_mating,
fitness_func = fitness_function,
parent_selection_type = parent_selection_type,
keep_parents = keep_parents,
crossover_type = crossover_type,
crossover_probability = crossover_prob,
mutation_type = mutation_type,
mutation_probability = mutation_prob,
mutation_by_replacement = True,
on_generation = callback_generation,
gene_space = gene_space,
save_best_solutions = True)
# perform GA run
ga_inst_uni_sn.run()
# save results
ga_inst_uni_sn.save('gp_ga_sn_uniform_init')
# best solution
solution = ga_inst_uni_sn.best_solutions[-1]
print("best chromosome: {solution}".format(solution = solution))
print("best fitness = {solution_fitness}".format(solution_fitness = \
get_fit(solution)))_____no_output_____
</code>
Here is the GA run for a diversified initial population._____no_output_____
<code>
ga_inst_div_sn = pygad.GA(initial_population = init_div,
num_genes = num_genes,
num_generations = n_gen,
num_parents_mating = num_parents_mating,
fitness_func = fitness_function,
parent_selection_type = parent_selection_type,
keep_parents = keep_parents,
crossover_type = crossover_type,
crossover_probability = crossover_prob,
mutation_type = mutation_type,
mutation_probability = mutation_prob,
mutation_by_replacement = True,
on_generation = callback_generation,
gene_space = gene_space,
save_best_solutions = True)
# perform GA run
ga_inst_div_sn.run()
# save results
ga_inst_div_sn.save('gp_ga_sn_diverse_init')
# best solution
solution = ga_inst_div_sn.best_solutions[-1]
print("best chromosome: {solution}".format(solution = solution))
print("best fitness = {solution_fitness}".format(solution_fitness = \
get_fit(solution)))_____no_output_____
</code>
``Load GA runs``
Saved ``pygad`` output can be accessed. This is done for the SNe runs below._____no_output_____
<code>
load_ga_uniform = pygad.load('gp_ga_sn_uniform_init')
load_ga_diverse = pygad.load('gp_ga_sn_diverse_init')_____no_output_____
</code>
The GP reconstructions are shown below._____no_output_____
<code>
# champion chromosomes
chr_1 = load_ga_uniform.best_solutions[-1]
chr_2 = load_ga_diverse.best_solutions[-1]
z_min = 1e-5
z_max = 3
n_div = 1000
z_rec = np.logspace(np.log10(z_min), np.log10(z_max), n_div) # np.logspace takes base-10 exponents
logz_rec = np.log(z_rec)
champs = {}
champs['uniform'] = {'chromosome': chr_1}
champs['diverse'] = {'chromosome': chr_2}
for champ in champs:
chromosome = champs[champ]['chromosome']
gp = GP(chromosome)
rec = gp.predict(logz_ps, mz_ps, covmz_ps_tot,
logz_rec)
mz_rec, sigmz_rec = rec['Y'], np.sqrt(rec['varY'])
# compute chi2
mz = gp.predict(logz_ps, mz_ps, covmz_ps_tot,
logz_ps)['Y']
cov_inv = np.linalg.inv(covmz_ps_tot)
delta_H = mz - mz_ps
chi2 = ( delta_H @ cov_inv @ delta_H )
# print GA measures
print(champ)
print('log-marginal likelihood',
gp.get_logmlike(logz_ps, mz_ps, covmz_ps_tot))
print('penalty', penalty(chromosome))
print('fitness function', get_fit(chromosome))
print('chi^2', chi2)
print()
champs[champ]['logz'] = logz_rec
champs[champ]['mz'] = mz_rec
champs[champ]['sigmz'] = sigmz_rec
# plot champs' predictions
fig, ax = plt.subplots()
ax.errorbar(logz_ps, mz_ps,
yerr = np.sqrt(np.diag(covmz_ps_tot)),
fmt = 'kx', ecolor = 'k',
elinewidth = 1, capsize = 2, label = 'SNe')
# color, line style, and hatch list
clst = ['b', 'r']
llst = ['-', '--']
hlst = ['|', '-']
for champ in champs:
i = list(champs.keys()).index(champ)
mz_rec = champs[champ]['mz']
sigmz_rec = champs[champ]['sigmz']
ax.plot(logz_rec, mz_rec, clst[i] + llst[i],
label = champ)
ax.fill_between(logz_rec,
mz_rec - 2*sigmz_rec,
mz_rec + 2*sigmz_rec,
facecolor = clst[i], alpha = 0.2,
edgecolor = clst[i], hatch = hlst[i])
ax.set_xlabel('$\ln(z)$')
ax.set_ylabel('$m(z)$')
ax.set_xlim(np.log(z_min), np.log(z_max))
ax.set_ylim(-10, 30)
ax.legend(loc = 'upper left', prop = {'size': 9.5})
plt.show()uniform
log-marginal likelihood 62.18631886438317
penalty 18.44439727056968
fitness function 43.74192159381349
chi^2 35.429040731552476
diverse
log-marginal likelihood 62.55037313828511
penalty 18.44439727056968
fitness function 44.10597586771543
chi^2 34.62680636352342
</code>
Here is the fitness per generation for the GPs above. _____no_output_____
<code>
fit_uni = [get_fit(c) for c in load_ga_uniform.best_solutions]
fit_div = [get_fit(c) for c in load_ga_diverse.best_solutions]
fig, ax = plt.subplots()
ax.plot(fit_uni, 'b-', label = 'uniform')
ax.plot(fit_div, 'r--', label = 'diverse')
ax.set_xlabel('generation')
ax.set_ylabel('best fitness')
ax.set_xlim(1, n_gen)
ax.set_ylim(41, 45)
ax.legend(loc = 'lower right', prop = {'size': 9.5})
plt.show()_____no_output_____
</code>
### References_____no_output_____***Pantheon***: D. M. Scolnic et al., The Complete Light-curve Sample of Spectroscopically Confirmed SNe Ia
from Pan-STARRS1 and Cosmological Constraints from the Combined Pantheon Sample,
Astrophys. J. 859 (2018) 101 [[1710.00845](https://arxiv.org/abs/1710.00845)].
***Cosmic Chronometers***, from *various sources*:
(1) M. Moresco, L. Pozzetti, A. Cimatti, R. Jimenez, C. Maraston, L. Verde et al., A 6%
measurement of the Hubble parameter at z ∼ 0.45: direct evidence of the epoch of cosmic
re-acceleration, JCAP 05 (2016) 014 [[1601.01701](https://arxiv.org/abs/1601.01701)].
(2) M. Moresco, Raising the bar: new constraints on the Hubble parameter with cosmic
chronometers at z ∼ 2, Mon. Not. Roy. Astron. Soc. 450 (2015) L16 [[1503.01116](https://arxiv.org/abs/1503.01116)].
(3) C. Zhang, H. Zhang, S. Yuan, S. Liu, T.-J. Zhang and Y.-C. Sun, Four new observational H(z)
data from luminous red galaxies in the Sloan Digital Sky Survey data release seven, Research in
Astronomy and Astrophysics 14 (2014) 1221 [[1207.4541](https://arxiv.org/abs/1207.4541)].
(4) D. Stern, R. Jimenez, L. Verde, M. Kamionkowski and S. A. Stanford, Cosmic chronometers:
constraining the equation of state of dark energy. I: H(z) measurements, JCAP 2010 (2010)
008 [[0907.3149](https://arxiv.org/abs/0907.3149)].
(5) M. Moresco et al., Improved constraints on the expansion rate of the Universe up to z ˜1.1 from
the spectroscopic evolution of cosmic chronometers, JCAP 2012 (2012) 006 [[1201.3609](https://arxiv.org/abs/1201.3609)].
(6) Ratsimbazafy et al. Age-dating Luminous Red Galaxies observed with the Southern African
Large Telescope, Mon. Not. Roy. Astron. Soc. 467 (2017) 3239 [[1702.00418](https://arxiv.org/abs/1702.00418)]._____no_output_____
| {
"repository": "reggiebernardo/notebooks",
"path": "supp_ntbks_arxiv.2106.08688/gp_ga_reconstruction.ipynb",
"matched_keywords": [
"evolution"
],
"stars": null,
"size": 132608,
"hexsha": "d0bec9b708f65c4a2b9dc6d460254b4c04952ee3",
"max_line_length": 28216,
"avg_line_length": 117.7690941385,
"alphanum_fraction": 0.829414515
} |
# Notebook from Zanah-Tech/MadeWithML
Path: notebooks/08_Neural_Networks.ipynb
<div align="center">
<h1><img width="30" src="https://madewithml.com/static/images/rounded_logo.png"> <a href="https://madewithml.com/">Made With ML</a></h1>
Applied ML · MLOps · Production
<br>
Join 20K+ developers in learning how to responsibly <a href="https://madewithml.com/about/">deliver value</a> with ML.
<br>
</div>
<br>
<div align="center">
<a target="_blank" href="https://newsletter.madewithml.com"><img src="https://img.shields.io/badge/Subscribe-20K-brightgreen"></a>
<a target="_blank" href="https://github.com/GokuMohandas/MadeWithML"><img src="https://img.shields.io/github/stars/GokuMohandas/MadeWithML.svg?style=social&label=Star"></a>
<a target="_blank" href="https://www.linkedin.com/in/goku"><img src="https://img.shields.io/badge/style--5eba00.svg?label=LinkedIn&logo=linkedin&style=social"></a>
<a target="_blank" href="https://twitter.com/GokuMohandas"><img src="https://img.shields.io/twitter/follow/GokuMohandas.svg?label=Follow&style=social"></a>
<br>
🔥 Among the <a href="https://github.com/topics/deep-learning" target="_blank">top ML</a> repositories on GitHub
</div>
<br>
<hr>_____no_output_____# Neural Networks
In this lesson, we will explore multilayer perceptrons (MLPs) which are a basic type of neural network. We'll first motivate non-linear activation functions by trying to fit a linear model (logistic regression) on our non-linear spiral data. Then we'll implement an MLP using just NumPy and then with PyTorch._____no_output_____<div align="left">
<a target="_blank" href="https://madewithml.com/courses/ml-foundations/neural-networks/"><img src="https://img.shields.io/badge/📖 Read-blog post-9cf"></a>
<a href="https://github.com/GokuMohandas/MadeWithML/blob/main/notebooks/08_Neural_Networks.ipynb" role="button"><img src="https://img.shields.io/static/v1?label=&message=View%20On%20GitHub&color=586069&logo=github&labelColor=2f363d"></a>
<a href="https://colab.research.google.com/github/GokuMohandas/MadeWithML/blob/main/notebooks/08_Neural_Networks.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a>
</div>_____no_output_____# Overview_____no_output_____Our goal is to learn a model $\hat{y}$ that models $y$ given $X$ . You'll notice that neural networks are just extensions of the generalized linear methods we've seen so far but with non-linear activation functions since our data will be highly non-linear.
<div align="left">
<img src="https://raw.githubusercontent.com/GokuMohandas/MadeWithML/main/images/basics/neural-networks/mlp.png" width="500">
</div>
$z_1 = XW_1$
$a_1 = f(z_1)$
$z_2 = a_1W_2$
$\hat{y} = softmax(z_2)$ # classification
* $X$ = inputs | $\in \mathbb{R}^{NXD}$ ($D$ is the number of features)
* $W_1$ = 1st layer weights | $\in \mathbb{R}^{DXH}$ ($H$ is the number of hidden units in layer 1)
* $z_1$ = outputs from first layer $\in \mathbb{R}^{NXH}$
* $f$ = non-linear activation function
* $a_1$ = activation applied first layer's outputs | $\in \mathbb{R}^{NXH}$
* $W_2$ = 2nd layer weights | $\in \mathbb{R}^{HXC}$ ($C$ is the number of classes)
* $z_2$ = outputs from second layer $\in \mathbb{R}^{NXC}$
* $\hat{y}$ = prediction | $\in \mathbb{R}^{NXC}$ ($N$ is the number of samples)_____no_output_____* **Objective:** Predict the probability of class $y$ given the inputs $X$. Non-linearity is introduced to model the complex, non-linear data.
* **Advantages:**
* Can model non-linear patterns in the data really well.
* **Disadvantages:**
* Overfits easily.
* Computationally intensive as network increases in size.
* Not easily interpretable.
* **Miscellaneous:** Future neural network architectures that we'll see use the MLP as a modular unit for feed forward operations (affine transformation (XW) followed by a non-linear operation)._____no_output_____> We're going to leave out the bias terms $\beta$ to avoid further crowding the backpropagation calculations._____no_output_____# Set up_____no_output_____
<code>
import numpy as np
import random_____no_output_____SEED = 1234_____no_output_____# Set seed for reproducibility
np.random.seed(SEED)
random.seed(SEED)_____no_output_____
</code>
## Load data_____no_output_____I created some non-linearly separable spiral data so let's go ahead and download it for our classification task._____no_output_____
<code>
import matplotlib.pyplot as plt
import pandas as pd_____no_output_____# Load data
url = "https://raw.githubusercontent.com/GokuMohandas/MadeWithML/main/datasets/spiral.csv"
df = pd.read_csv(url, header=0) # load
df = df.sample(frac=1).reset_index(drop=True) # shuffle
df.head()_____no_output_____# Data shapes
X = df[['X1', 'X2']].values
y = df['color'].values
print ("X: ", np.shape(X))
print ("y: ", np.shape(y))X: (1500, 2)
y: (1500,)
# Visualize data
plt.title("Generated non-linear data")
colors = {'c1': 'red', 'c2': 'yellow', 'c3': 'blue'}
plt.scatter(X[:, 0], X[:, 1], c=[colors[_y] for _y in y], edgecolors='k', s=25)
plt.show()_____no_output_____
</code>
## Split data_____no_output_____We'll shuffle our dataset (since it's ordered by class) and then create our data splits (stratified on class)._____no_output_____
<code>
import collections
from sklearn.model_selection import train_test_split_____no_output_____TRAIN_SIZE = 0.7
VAL_SIZE = 0.15
TEST_SIZE = 0.15_____no_output_____def train_val_test_split(X, y, train_size):
"""Split dataset into data splits."""
X_train, X_, y_train, y_ = train_test_split(X, y, train_size=TRAIN_SIZE, stratify=y)
X_val, X_test, y_val, y_test = train_test_split(X_, y_, train_size=0.5, stratify=y_)
return X_train, X_val, X_test, y_train, y_val, y_test_____no_output_____# Create data splits
X_train, X_val, X_test, y_train, y_val, y_test = train_val_test_split(
X=X, y=y, train_size=TRAIN_SIZE)
print (f"X_train: {X_train.shape}, y_train: {y_train.shape}")
print (f"X_val: {X_val.shape}, y_val: {y_val.shape}")
print (f"X_test: {X_test.shape}, y_test: {y_test.shape}")
print (f"Sample point: {X_train[0]} → {y_train[0]}")X_train: (1050, 2), y_train: (1050,)
X_val: (225, 2), y_val: (225,)
X_test: (225, 2), y_test: (225,)
Sample point: [-0.63919105 -0.69724176] → c1
</code>
## Label encoding_____no_output_____In the previous lesson we wrote our own label encoder class to see the inner functions but this time we'll use scikit-learn [`LabelEncoder`](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.LabelEncoder.html) class which does the same operations as ours._____no_output_____
<code>
from sklearn.preprocessing import LabelEncoder_____no_output_____# Output vectorizer
label_encoder = LabelEncoder()_____no_output_____# Fit on train data
label_encoder = label_encoder.fit(y_train)
classes = list(label_encoder.classes_)
print (f"classes: {classes}")classes: ['c1', 'c2', 'c3']
# Convert labels to tokens
print (f"y_train[0]: {y_train[0]}")
y_train = label_encoder.transform(y_train)
y_val = label_encoder.transform(y_val)
y_test = label_encoder.transform(y_test)
print (f"y_train[0]: {y_train[0]}")y_train[0]: c1
y_train[0]: 0
# Class weights
counts = np.bincount(y_train)
class_weights = {i: 1.0/count for i, count in enumerate(counts)}
print (f"counts: {counts}\nweights: {class_weights}")counts: [350 350 350]
weights: {0: 0.002857142857142857, 1: 0.002857142857142857, 2: 0.002857142857142857}
</code>
## Standardize data_____no_output_____We need to standardize our data (zero mean and unit variance) so a specific feature's magnitude doesn't affect how the model learns its weights. We're only going to standardize the inputs X because our outputs y are class values._____no_output_____
<code>
from sklearn.preprocessing import StandardScaler_____no_output_____# Standardize the data (mean=0, std=1) using training data
X_scaler = StandardScaler().fit(X_train)_____no_output_____# Apply scaler on training and test data (don't standardize outputs for classification)
X_train = X_scaler.transform(X_train)
X_val = X_scaler.transform(X_val)
X_test = X_scaler.transform(X_test)_____no_output_____# Check (means should be ~0 and std should be ~1)
print (f"X_test[0]: mean: {np.mean(X_test[:, 0], axis=0):.1f}, std: {np.std(X_test[:, 0], axis=0):.1f}")
print (f"X_test[1]: mean: {np.mean(X_test[:, 1], axis=0):.1f}, std: {np.std(X_test[:, 1], axis=0):.1f}")X_test[0]: mean: 0.1, std: 0.9
X_test[1]: mean: 0.0, std: 1.0
</code>
# Linear model_____no_output_____Before we get to our neural network, we're going to motivate non-linear activation functions by implementing a generalized linear model (logistic regression). We'll see why linear models (with linear activations) won't suffice for our dataset._____no_output_____
<code>
import torch_____no_output_____# Set seed for reproducibility
torch.manual_seed(SEED)_____no_output_____
</code>
## Model_____no_output_____
<code>
from torch import nn
import torch.nn.functional as F_____no_output_____INPUT_DIM = X_train.shape[1] # X is 2-dimensional
HIDDEN_DIM = 100
NUM_CLASSES = len(classes) # 3 classes_____no_output_____class LinearModel(nn.Module):
def __init__(self, input_dim, hidden_dim, num_classes):
super(LinearModel, self).__init__()
self.fc1 = nn.Linear(input_dim, hidden_dim)
self.fc2 = nn.Linear(hidden_dim, num_classes)
def forward(self, x_in, apply_softmax=False):
z = self.fc1(x_in) # linear activation
y_pred = self.fc2(z)
if apply_softmax:
y_pred = F.softmax(y_pred, dim=1)
return y_pred_____no_output_____# Initialize model
model = LinearModel(input_dim=INPUT_DIM, hidden_dim=HIDDEN_DIM, num_classes=NUM_CLASSES)
print (model.named_parameters)<bound method Module.named_parameters of LinearModel(
(fc1): Linear(in_features=2, out_features=100, bias=True)
(fc2): Linear(in_features=100, out_features=3, bias=True)
)>
</code>
## Training_____no_output_____
<code>
from torch.optim import Adam_____no_output_____LEARNING_RATE = 1e-2
NUM_EPOCHS = 10
BATCH_SIZE = 32_____no_output_____# Define Loss
class_weights_tensor = torch.Tensor(list(class_weights.values()))
loss_fn = nn.CrossEntropyLoss(weight=class_weights_tensor)_____no_output_____# Accuracy
def accuracy_fn(y_pred, y_true):
n_correct = torch.eq(y_pred, y_true).sum().item()
accuracy = (n_correct / len(y_pred)) * 100
return accuracy_____no_output_____# Optimizer
optimizer = Adam(model.parameters(), lr=LEARNING_RATE) _____no_output_____# Convert data to tensors
X_train = torch.Tensor(X_train)
y_train = torch.LongTensor(y_train)
X_val = torch.Tensor(X_val)
y_val = torch.LongTensor(y_val)
X_test = torch.Tensor(X_test)
y_test = torch.LongTensor(y_test)_____no_output_____# Training
for epoch in range(NUM_EPOCHS):
# Forward pass
y_pred = model(X_train)
# Loss
loss = loss_fn(y_pred, y_train)
# Zero all gradients
optimizer.zero_grad()
# Backward pass
loss.backward()
# Update weights
optimizer.step()
if epoch%1==0:
predictions = y_pred.max(dim=1)[1] # class
accuracy = accuracy_fn(y_pred=predictions, y_true=y_train)
print (f"Epoch: {epoch} | loss: {loss:.2f}, accuracy: {accuracy:.1f}")Epoch: 0 | loss: 1.13, accuracy: 49.9
Epoch: 1 | loss: 0.91, accuracy: 50.3
Epoch: 2 | loss: 0.79, accuracy: 55.3
Epoch: 3 | loss: 0.74, accuracy: 54.6
Epoch: 4 | loss: 0.74, accuracy: 53.7
Epoch: 5 | loss: 0.75, accuracy: 53.6
Epoch: 6 | loss: 0.76, accuracy: 53.7
Epoch: 7 | loss: 0.77, accuracy: 53.8
Epoch: 8 | loss: 0.77, accuracy: 53.9
Epoch: 9 | loss: 0.78, accuracy: 53.9
</code>
## Evaluation_____no_output_____
<code>
import json
import matplotlib.pyplot as plt
from sklearn.metrics import precision_recall_fscore_support_____no_output_____def get_performance(y_true, y_pred, classes):
"""Per-class performance metrics."""
# Performance
performance = {"overall": {}, "class": {}}
# Overall performance
metrics = precision_recall_fscore_support(y_true, y_pred, average="weighted")
performance["overall"]["precision"] = metrics[0]
performance["overall"]["recall"] = metrics[1]
performance["overall"]["f1"] = metrics[2]
performance["overall"]["num_samples"] = np.float64(len(y_true))
# Per-class performance
metrics = precision_recall_fscore_support(y_true, y_pred, average=None)
for i in range(len(classes)):
performance["class"][classes[i]] = {
"precision": metrics[0][i],
"recall": metrics[1][i],
"f1": metrics[2][i],
"num_samples": np.float64(metrics[3][i]),
}
return performance_____no_output_____# Predictions
y_prob = model(X_test, apply_softmax=True)
print (f"sample probability: {y_prob[0]}")
y_pred = y_prob.max(dim=1)[1]
print (f"sample class: {y_pred[0]}")sample probability: tensor([0.8995, 0.0286, 0.0719], grad_fn=<SelectBackward>)
sample class: 0
# Performance report
performance = get_performance(y_true=y_test, y_pred=y_pred, classes=classes)
print (json.dumps(performance, indent=2)){
"overall": {
"precision": 0.5326832791621524,
"recall": 0.5333333333333333,
"f1": 0.5327986224880954,
"num_samples": 225.0
},
"class": {
"c1": {
"precision": 0.5,
"recall": 0.5066666666666667,
"f1": 0.5033112582781457,
"num_samples": 75.0
},
"c2": {
"precision": 0.5211267605633803,
"recall": 0.49333333333333335,
"f1": 0.5068493150684932,
"num_samples": 75.0
},
"c3": {
"precision": 0.5769230769230769,
"recall": 0.6,
"f1": 0.5882352941176471,
"num_samples": 75.0
}
}
}
def plot_multiclass_decision_boundary(model, X, y):
x_min, x_max = X[:, 0].min() - 0.1, X[:, 0].max() + 0.1
y_min, y_max = X[:, 1].min() - 0.1, X[:, 1].max() + 0.1
xx, yy = np.meshgrid(np.linspace(x_min, x_max, 101), np.linspace(y_min, y_max, 101))
cmap = plt.cm.Spectral
X_test = torch.from_numpy(np.c_[xx.ravel(), yy.ravel()]).float()
y_pred = model(X_test, apply_softmax=True)
_, y_pred = y_pred.max(dim=1)
y_pred = y_pred.reshape(xx.shape)
plt.contourf(xx, yy, y_pred, cmap=plt.cm.Spectral, alpha=0.8)
plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap=plt.cm.RdYlBu)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())_____no_output_____# Visualize the decision boundary
plt.figure(figsize=(12,5))
plt.subplot(1, 2, 1)
plt.title("Train")
plot_multiclass_decision_boundary(model=model, X=X_train, y=y_train)
plt.subplot(1, 2, 2)
plt.title("Test")
plot_multiclass_decision_boundary(model=model, X=X_test, y=y_test)
plt.show()_____no_output_____
</code>
# Activation functions_____no_output_____Using the generalized linear method (logistic regression) yielded poor results because of the non-linearity present in our data yet our activation functions were linear. We need to use an activation function that can allow our model to learn and map the non-linearity in our data. There are many different options so let's explore a few._____no_output_____
<code>
# Fig size
plt.figure(figsize=(12,3))
# Data
x = torch.arange(-5., 5., 0.1)
# Sigmoid activation (constrain a value between 0 and 1.)
plt.subplot(1, 3, 1)
plt.title("Sigmoid activation")
y = torch.sigmoid(x)
plt.plot(x.numpy(), y.numpy())
# Tanh activation (constrain a value between -1 and 1.)
plt.subplot(1, 3, 2)
y = torch.tanh(x)
plt.title("Tanh activation")
plt.plot(x.numpy(), y.numpy())
# Relu (clip the negative values to 0)
plt.subplot(1, 3, 3)
y = F.relu(x)
plt.title("ReLU activation")
plt.plot(x.numpy(), y.numpy())
# Show plots
plt.show()_____no_output_____
</code>
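The note below also mentions leaky ReLU and PReLU, ReLU variants that keep a small slope for negative inputs instead of zeroing them. A minimal sketch for comparison (the 0.1 slope is an arbitrary choice for illustration):_____no_output_____
<code>
# Leaky ReLU keeps a small gradient for negative inputs (slope = 0.1 here)
plt.figure(figsize=(8,3))
plt.subplot(1, 2, 1)
plt.title("ReLU")
plt.plot(x.numpy(), F.relu(x).numpy())
plt.subplot(1, 2, 2)
plt.title("Leaky ReLU (slope=0.1)")
plt.plot(x.numpy(), F.leaky_relu(x, negative_slope=0.1).numpy())
plt.show()_____no_output_____
</code>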
The ReLU activation function ($max(0,z)$) is by far the most widely used activation function for neural networks. But as you can see, each activation function has its own constraints so there are circumstances where you'll want to use different ones. For example, if we need to constrain our outputs between 0 and 1, then the sigmoid activation is the best choice._____no_output_____> In some cases, using a ReLU activation function may not be sufficient. For instance, when the outputs from our neurons are mostly negative, the activation function will produce zeros. This effectively creates a "dying ReLU" and a recovery is unlikely. To mitigate this effect, we could lower the learning rate or use [alternative ReLU activations](https://medium.com/@danqing/a-practical-guide-to-relu-b83ca804f1f7), ex. leaky ReLU or parametric ReLU (PReLU), which have a small slope for negative neuron outputs. _____no_output_____# NumPy
Now let's create our multilayer perceptron (MLP) which is going to be exactly like the logistic regression model but with the activation function to map the non-linearity in our data.
> It's normal to find the math and code in this section slightly complex. You can still read each of the steps to build intuition for when we implement this using PyTorch.
_____no_output_____Our goal is to learn a model $\hat{y}$ that models $y$ given $X$. You'll notice that neural networks are just extensions of the generalized linear methods we've seen so far but with non-linear activation functions since our data will be highly non-linear.
$z_1 = XW_1$
$a_1 = f(z_1)$
$z_2 = a_1W_2$
$\hat{y} = softmax(z_2)$ # classification
* $X$ = inputs | $\in \mathbb{R}^{NXD}$ ($D$ is the number of features)
* $W_1$ = 1st layer weights | $\in \mathbb{R}^{DXH}$ ($H$ is the number of hidden units in layer 1)
* $z_1$ = outputs from first layer $\in \mathbb{R}^{NXH}$
* $f$ = non-linear activation function
* $a_1$ = activation applied first layer's outputs | $\in \mathbb{R}^{NXH}$
* $W_2$ = 2nd layer weights | $\in \mathbb{R}^{HXC}$ ($C$ is the number of classes)
* $z_2$ = outputs from second layer $\in \mathbb{R}^{NXC}$
* $\hat{y}$ = prediction | $\in \mathbb{R}^{NXC}$ ($N$ is the number of samples)_____no_output_____## Initialize weights_____no_output_____1. Randomly initialize the model's weights $W$ (we'll cover more effective initialization strategies later in this lesson)._____no_output_____
<code>
# Initialize first layer's weights
W1 = 0.01 * np.random.randn(INPUT_DIM, HIDDEN_DIM)
b1 = np.zeros((1, HIDDEN_DIM))
print (f"W1: {W1.shape}")
print (f"b1: {b1.shape}")W1: (2, 100)
b1: (1, 100)
</code>
## Model_____no_output_____2. Feed inputs $X$ into the model to do the forward pass and receive the probabilities._____no_output_____First we pass the inputs into the first layer.
* $z_1 = XW_1$_____no_output_____
<code>
# z1 = [NX2] · [2X100] + [1X100] = [NX100]
z1 = np.dot(X_train, W1) + b1
print (f"z1: {z1.shape}")z1: (1050, 100)
</code>
Next we apply the non-linear activation function, ReLU ($max(0,z)$) in this case.
* $a_1 = f(z_1)$_____no_output_____
<code>
# Apply activation function
a1 = np.maximum(0, z1) # ReLU
print (f"a_1: {a1.shape}")a_1: (1050, 100)
</code>
We pass the activations to the second layer to get our logits.
* $z_2 = a_1W_2$_____no_output_____
<code>
# Initialize second layer's weights
W2 = 0.01 * np.random.randn(HIDDEN_DIM, NUM_CLASSES)
b2 = np.zeros((1, NUM_CLASSES))
print (f"W2: {W2.shape}")
print (f"b2: {b2.shape}")W2: (100, 3)
b2: (1, 3)
# z2 = logits = [NX100] · [100X3] + [1X3] = [NX3]
logits = np.dot(a1, W2) + b2
print (f"logits: {logits.shape}")
print (f"sample: {logits[0]}")logits: (1050, 3)
sample: [-0.00010001 0.00418463 -0.00067274]
</code>
We'll apply the softmax function to normalize the logits and obtain class probabilities.
* $\hat{y} = softmax(z_2)$_____no_output_____
<code>
# Normalization via softmax to obtain class probabilities
exp_logits = np.exp(logits)
y_hat = exp_logits / np.sum(exp_logits, axis=1, keepdims=True)
print (f"y_hat: {y_hat.shape}")
print (f"sample: {y_hat[0]}")y_hat: (1050, 3)
sample: [0.33292037 0.33434987 0.33272975]
</code>
## Loss_____no_output_____3. Compare the predictions $\hat{y}$ (ex. [0.3, 0.3, 0.4]) with the actual target values $y$ (ex. class 2 would look like [0, 0, 1]) with the objective (cost) function to determine loss $J$. A common objective function for classification tasks is cross-entropy loss.
* $J(\theta) = - \sum_i ln(\hat{y_i}) = - \sum_i ln (\frac{e^{X_iW_y}}{\sum_j e^{X_iW}}) $_____no_output_____
<code>
# Loss
correct_class_logprobs = -np.log(y_hat[range(len(y_hat)), y_train])
loss = np.sum(correct_class_logprobs) / len(y_train)_____no_output_____
</code>
## Gradients_____no_output_____4. Calculate the gradient of loss $J(\theta)$ w.r.t to the model weights.
The gradient of the loss w.r.t to $W_2$ is the same as the gradients from logistic regression since $\hat{y} = softmax(z_2)$.
* $\frac{\partial{J}}{\partial{W_{2j}}} = \frac{\partial{J}}{\partial{\hat{y}}}\frac{\partial{\hat{y}}}{\partial{W_{2j}}} = - \frac{1}{\hat{y}}\frac{\partial{\hat{y}}}{\partial{W_{2j}}} = - \frac{1}{\frac{e^{W_{2y}a_1}}{\sum_j e^{a_1W}}}\frac{\sum_j e^{a_1W}e^{a_1W_{2y}}0 - e^{a_1W_{2y}}e^{a_1W_{2j}}a_1}{(\sum_j e^{a_1W})^2} = \frac{a_1e^{a_1W_{2j}}}{\sum_j e^{a_1W}} = a_1\hat{y}$
* $\frac{\partial{J}}{\partial{W_{2y}}} = \frac{\partial{J}}{\partial{\hat{y}}}\frac{\partial{\hat{y}}}{\partial{W_{2y}}} = - \frac{1}{\hat{y}}\frac{\partial{\hat{y}}}{\partial{W_{2y}}} = - \frac{1}{\frac{e^{W_{2y}a_1}}{\sum_j e^{a_1W}}}\frac{\sum_j e^{a_1W}e^{a_1W_{2y}}a_1 - e^{a_1W_{2y}}e^{a_1W_{2y}}a_1}{(\sum_j e^{a_1W})^2} = \frac{1}{\hat{y}}(a_1\hat{y} - a_1\hat{y}^2) = a_1(\hat{y}-1)$
The gradient of the loss w.r.t $W_1$ is a bit trickier since we have to backpropagate through two sets of weights.
* $ \frac{\partial{J}}{\partial{W_1}} = \frac{\partial{J}}{\partial{\hat{y}}} \frac{\partial{\hat{y}}}{\partial{a_1}} \frac{\partial{a_1}}{\partial{z_1}} \frac{\partial{z_1}}{\partial{W_1}} = W_2(\partial{scores})(\partial{ReLU})X $_____no_output_____
<code>
# dJ/dW2
dscores = y_hat
dscores[range(len(y_hat)), y_train] -= 1
dscores /= len(y_train)
dW2 = np.dot(a1.T, dscores)
db2 = np.sum(dscores, axis=0, keepdims=True)_____no_output_____# dJ/dW1
dhidden = np.dot(dscores, W2.T)
dhidden[a1 <= 0] = 0 # ReLu backprop
dW1 = np.dot(X_train.T, dhidden)
db1 = np.sum(dhidden, axis=0, keepdims=True)_____no_output_____
</code>
## Update weights_____no_output_____5. Update the weights $W$ using a small learning rate $\alpha$. The updates will penalize the probability for the incorrect classes ($j$) and encourage a higher probability for the correct class ($y$).
* $W_i = W_i - \alpha\frac{\partial{J}}{\partial{W_i}}$_____no_output_____
<code>
# Update weights
W1 += -LEARNING_RATE * dW1
b1 += -LEARNING_RATE * db1
W2 += -LEARNING_RATE * dW2
b2 += -LEARNING_RATE * db2_____no_output_____
</code>
## Training_____no_output_____6. Repeat steps 2 - 4 until model performs well._____no_output_____
<code>
# Convert tensors to NumPy arrays
X_train = X_train.numpy()
y_train = y_train.numpy()
X_val = X_val.numpy()
y_val = y_val.numpy()
X_test = X_test.numpy()
y_test = y_test.numpy()_____no_output_____# Initialize random weights
W1 = 0.01 * np.random.randn(INPUT_DIM, HIDDEN_DIM)
b1 = np.zeros((1, HIDDEN_DIM))
W2 = 0.01 * np.random.randn(HIDDEN_DIM, NUM_CLASSES)
b2 = np.zeros((1, NUM_CLASSES))
# Training loop
for epoch_num in range(1000):
# First layer forward pass [NX2] · [2X100] = [NX100]
z1 = np.dot(X_train, W1) + b1
# Apply activation function
a1 = np.maximum(0, z1) # ReLU
# z2 = logits = [NX100] · [100X3] = [NX3]
logits = np.dot(a1, W2) + b2
# Normalization via softmax to obtain class probabilities
exp_logits = np.exp(logits)
y_hat = exp_logits / np.sum(exp_logits, axis=1, keepdims=True)
# Loss
correct_class_logprobs = -np.log(y_hat[range(len(y_hat)), y_train])
loss = np.sum(correct_class_logprobs) / len(y_train)
# show progress
if epoch_num%100 == 0:
# Accuracy
y_pred = np.argmax(logits, axis=1)
accuracy = np.mean(np.equal(y_train, y_pred))
print (f"Epoch: {epoch_num}, loss: {loss:.3f}, accuracy: {accuracy:.3f}")
# dJ/dW2
dscores = y_hat
dscores[range(len(y_hat)), y_train] -= 1
dscores /= len(y_train)
dW2 = np.dot(a1.T, dscores)
db2 = np.sum(dscores, axis=0, keepdims=True)
# dJ/dW1
dhidden = np.dot(dscores, W2.T)
dhidden[a1 <= 0] = 0 # ReLu backprop
dW1 = np.dot(X_train.T, dhidden)
db1 = np.sum(dhidden, axis=0, keepdims=True)
# Update weights
W1 += -1e0 * dW1
b1 += -1e0 * db1
W2 += -1e0 * dW2
b2 += -1e0 * db2Epoch: 0, loss: 1.099, accuracy: 0.349
Epoch: 100, loss: 0.545, accuracy: 0.687
Epoch: 200, loss: 0.247, accuracy: 0.903
Epoch: 300, loss: 0.142, accuracy: 0.949
Epoch: 400, loss: 0.099, accuracy: 0.974
Epoch: 500, loss: 0.076, accuracy: 0.986
Epoch: 600, loss: 0.062, accuracy: 0.990
Epoch: 700, loss: 0.052, accuracy: 0.994
Epoch: 800, loss: 0.046, accuracy: 0.995
Epoch: 900, loss: 0.041, accuracy: 0.995
</code>
## Evaluation_____no_output_____
<code>
class MLPFromScratch():
def predict(self, x):
z1 = np.dot(x, W1) + b1
a1 = np.maximum(0, z1)
logits = np.dot(a1, W2) + b2
exp_logits = np.exp(logits)
y_hat = exp_logits / np.sum(exp_logits, axis=1, keepdims=True)
return y_hat_____no_output_____# Evaluation
model = MLPFromScratch()
y_prob = model.predict(X_test)
y_pred = np.argmax(y_prob, axis=1)_____no_output_____# Performance report
performance = get_performance(y_true=y_test, y_pred=y_pred, classes=classes)
print (json.dumps(performance, indent=2)){
"overall": {
"precision": 0.9956140350877193,
"recall": 0.9955555555555556,
"f1": 0.9955553580159119,
"num_samples": 225.0
},
"class": {
"c1": {
"precision": 1.0,
"recall": 0.9866666666666667,
"f1": 0.9932885906040269,
"num_samples": 75.0
},
"c2": {
"precision": 1.0,
"recall": 1.0,
"f1": 1.0,
"num_samples": 75.0
},
"c3": {
"precision": 0.9868421052631579,
"recall": 1.0,
"f1": 0.9933774834437086,
"num_samples": 75.0
}
}
}
def plot_multiclass_decision_boundary_numpy(model, X, y, savefig_fp=None):
"""Plot the multiclass decision boundary for a model that accepts 2D inputs.
Credit: https://cs231n.github.io/neural-networks-case-study/
Arguments:
model {function} -- trained model with function model.predict(x_in).
X {numpy.ndarray} -- 2D inputs with shape (N, 2).
y {numpy.ndarray} -- 1D outputs with shape (N,).
"""
# Axis boundaries
x_min, x_max = X[:, 0].min() - 0.1, X[:, 0].max() + 0.1
y_min, y_max = X[:, 1].min() - 0.1, X[:, 1].max() + 0.1
xx, yy = np.meshgrid(np.linspace(x_min, x_max, 101),
np.linspace(y_min, y_max, 101))
# Create predictions
x_in = np.c_[xx.ravel(), yy.ravel()]
y_pred = model.predict(x_in)
y_pred = np.argmax(y_pred, axis=1).reshape(xx.shape)
# Plot decision boundary
plt.contourf(xx, yy, y_pred, cmap=plt.cm.Spectral, alpha=0.8)
plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap=plt.cm.RdYlBu)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
# Plot
if savefig_fp:
plt.savefig(savefig_fp, format='png')_____no_output_____# Visualize the decision boundary
plt.figure(figsize=(12,5))
plt.subplot(1, 2, 1)
plt.title("Train")
plot_multiclass_decision_boundary_numpy(model=model, X=X_train, y=y_train)
plt.subplot(1, 2, 2)
plt.title("Test")
plot_multiclass_decision_boundary_numpy(model=model, X=X_test, y=y_test)
plt.show()_____no_output_____
</code>
# PyTorch_____no_output_____## Model_____no_output_____We'll be using two linear layers along with PyTorch [Functional](https://pytorch.org/docs/stable/nn.functional.html) API's [ReLU](https://pytorch.org/docs/stable/nn.functional.html#torch.nn.functional.relu) operation. _____no_output_____
<code>
class MLP(nn.Module):
def __init__(self, input_dim, hidden_dim, num_classes):
super(MLP, self).__init__()
self.fc1 = nn.Linear(input_dim, hidden_dim)
self.fc2 = nn.Linear(hidden_dim, num_classes)
def forward(self, x_in, apply_softmax=False):
z = F.relu(self.fc1(x_in)) # ReLU activation function added!
y_pred = self.fc2(z)
if apply_softmax:
y_pred = F.softmax(y_pred, dim=1)
return y_pred_____no_output_____# Initialize model
model = MLP(input_dim=INPUT_DIM, hidden_dim=HIDDEN_DIM, num_classes=NUM_CLASSES)
print (model.named_parameters)<bound method Module.named_parameters of MLP(
(fc1): Linear(in_features=2, out_features=100, bias=True)
(fc2): Linear(in_features=100, out_features=3, bias=True)
)>
</code>
## Training_____no_output_____
<code>
# Define Loss
class_weights_tensor = torch.Tensor(list(class_weights.values()))
loss_fn = nn.CrossEntropyLoss(weight=class_weights_tensor)_____no_output_____# Accuracy
def accuracy_fn(y_pred, y_true):
n_correct = torch.eq(y_pred, y_true).sum().item()
accuracy = (n_correct / len(y_pred)) * 100
return accuracy_____no_output_____# Optimizer
optimizer = Adam(model.parameters(), lr=LEARNING_RATE) _____no_output_____# Convert data to tensors
X_train = torch.Tensor(X_train)
y_train = torch.LongTensor(y_train)
X_val = torch.Tensor(X_val)
y_val = torch.LongTensor(y_val)
X_test = torch.Tensor(X_test)
y_test = torch.LongTensor(y_test)_____no_output_____# Training
for epoch in range(NUM_EPOCHS*10):
# Forward pass
y_pred = model(X_train)
# Loss
loss = loss_fn(y_pred, y_train)
# Zero all gradients
optimizer.zero_grad()
# Backward pass
loss.backward()
# Update weights
optimizer.step()
if epoch%10==0:
predictions = y_pred.max(dim=1)[1] # class
accuracy = accuracy_fn(y_pred=predictions, y_true=y_train)
print (f"Epoch: {epoch} | loss: {loss:.2f}, accuracy: {accuracy:.1f}")Epoch: 0 | loss: 1.11, accuracy: 24.3
Epoch: 10 | loss: 0.67, accuracy: 55.4
Epoch: 20 | loss: 0.51, accuracy: 70.6
Epoch: 30 | loss: 0.39, accuracy: 88.5
Epoch: 40 | loss: 0.29, accuracy: 90.3
Epoch: 50 | loss: 0.22, accuracy: 93.4
Epoch: 60 | loss: 0.18, accuracy: 94.7
Epoch: 70 | loss: 0.15, accuracy: 95.9
Epoch: 80 | loss: 0.12, accuracy: 97.3
Epoch: 90 | loss: 0.11, accuracy: 97.7
</code>
## Evaluation_____no_output_____
<code>
# Predictions
y_prob = model(X_test, apply_softmax=True)
y_pred = y_prob.max(dim=1)[1]_____no_output_____# Performance report
performance = get_performance(y_true=y_test, y_pred=y_pred, classes=classes)
print (json.dumps(performance, indent=2)){
"overall": {
"precision": 0.9913419913419913,
"recall": 0.9911111111111112,
"f1": 0.9911095305832148,
"num_samples": 225.0
},
"class": {
"c1": {
"precision": 1.0,
"recall": 0.9733333333333334,
"f1": 0.9864864864864865,
"num_samples": 75.0
},
"c2": {
"precision": 1.0,
"recall": 1.0,
"f1": 1.0,
"num_samples": 75.0
},
"c3": {
"precision": 0.974025974025974,
"recall": 1.0,
"f1": 0.9868421052631579,
"num_samples": 75.0
}
}
}
# Visualize the decision boundary
plt.figure(figsize=(12,5))
plt.subplot(1, 2, 1)
plt.title("Train")
plot_multiclass_decision_boundary(model=model, X=X_train, y=y_train)
plt.subplot(1, 2, 2)
plt.title("Test")
plot_multiclass_decision_boundary(model=model, X=X_test, y=y_test)
plt.show()_____no_output_____
</code>
## Inference_____no_output_____
<code>
# Inputs for inference
X_infer = pd.DataFrame([{'X1': 0.1, 'X2': 0.1}])
X_infer.head()_____no_output_____# Standardize
X_infer = X_scaler.transform(X_infer)
print (X_infer)[[0.29906749 0.30544029]]
# Predict
y_infer = model(torch.Tensor(X_infer), apply_softmax=True)
prob, _class = y_infer.max(dim=1)
label = label_encoder.inverse_transform(_class.detach().numpy())[0]
print (f"The probability that you have {label} is {prob.detach().numpy()[0]*100.0:.0f}%")The probability that you have c1 is 92%
</code>
# Initializing weights_____no_output_____So far we have been initializing weights with small random values, and this isn't optimal for convergence during training. The objective is to have weights that are able to produce outputs that follow a similar distribution across all neurons. We can do this by enforcing the weights to have unit variance prior to the affine and non-linear operations._____no_output_____> A popular method is to apply [xavier initialization](http://andyljones.tumblr.com/post/110998971763/an-explanation-of-xavier-initialization), which essentially initializes the weights to allow the signal from the data to reach deep into the network. You may be wondering why we don't do this for every forward pass and that's a great question. We'll look at more advanced strategies that help with optimization like batch/layer normalization, etc. in future lessons. Meanwhile you can check out other initializers [here](https://pytorch.org/docs/stable/nn.init.html)._____no_output_____
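As a rough numerical sketch of that unit-variance idea (Xavier normal initialization draws weights with standard deviation $\sqrt{2/(fan_{in}+fan_{out})}$; the shapes and numbers below are illustrative only):_____no_output_____
<code>
# Toy check: output std with the naive small-random init vs. a Xavier-scaled init
fan_in, fan_out = 100, 100
x_toy = np.random.randn(1000, fan_in) # unit-variance inputs
W_naive = 0.01 * np.random.randn(fan_in, fan_out) # small random init used so far
W_xavier = np.random.randn(fan_in, fan_out) * np.sqrt(2.0/(fan_in + fan_out))
print ("naive init output std: ", np.dot(x_toy, W_naive).std())
print ("xavier init output std:", np.dot(x_toy, W_xavier).std())_____no_output_____
</code>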
<code>
from torch.nn import init_____no_output_____class MLP(nn.Module):
def __init__(self, input_dim, hidden_dim, num_classes):
super(MLP, self).__init__()
self.fc1 = nn.Linear(input_dim, hidden_dim)
self.fc2 = nn.Linear(hidden_dim, num_classes)
def init_weights(self):
init.xavier_normal(self.fc1.weight, gain=init.calculate_gain('relu'))
def forward(self, x_in, apply_softmax=False):
z = F.relu(self.fc1(x_in)) # ReLU activation function added!
y_pred = self.fc2(z)
if apply_softmax:
y_pred = F.softmax(y_pred, dim=1)
return y_pred_____no_output_____
</code>
# Dropout_____no_output_____A great technique to have our models generalize (perform well on test data) is to increase the size of your data but this isn't always an option. Fortuntely, there are methods like regularization and dropout that can help create a more robust model.
Dropout is a technique (used only during training) that allows us to zero the outputs of neurons. We do this for `dropout_p`% of the total neurons in each layer and it changes every batch. Dropout prevents units from co-adapting too much to the data and acts as a sampling strategy since we drop a different set of neurons each time.
<div align="left">
<img src="https://raw.githubusercontent.com/GokuMohandas/MadeWithML/main/images/basics/neural-networks/dropout.png" width="350">
</div>
* [Dropout: A Simple Way to Prevent Neural Networks from
Overfitting](http://jmlr.org/papers/volume15/srivastava14a/srivastava14a.pdf)_____no_output_____
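A minimal NumPy sketch of the (inverted) dropout applied at training time: each unit is zeroed with probability `dropout_p` and the survivors are rescaled by `1/(1 - dropout_p)` so the expected activation is unchanged (PyTorch's `nn.Dropout` below applies the same rescaling):_____no_output_____
<code>
# Inverted dropout on a toy activation matrix (training-time behaviour only)
p = 0.1
a = np.random.randn(4, 5) # pretend these are layer activations
mask = (np.random.rand(*a.shape) > p) # keep each unit with probability 1 - p
a_dropped = a * mask / (1 - p) # rescale survivors so the expectation matches
print (a_dropped)_____no_output_____
</code>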
<code>
DROPOUT_P = 0.1 # % of the neurons that are dropped each pass_____no_output_____class MLP(nn.Module):
def __init__(self, input_dim, hidden_dim, dropout_p, num_classes):
super(MLP, self).__init__()
self.fc1 = nn.Linear(input_dim, hidden_dim)
self.dropout = nn.Dropout(dropout_p) # dropout
self.fc2 = nn.Linear(hidden_dim, num_classes)
def init_weights(self):
init.xavier_normal(self.fc1.weight, gain=init.calculate_gain('relu'))
def forward(self, x_in, apply_softmax=False):
z = F.relu(self.fc1(x_in))
z = self.dropout(z) # dropout
y_pred = self.fc2(z)
if apply_softmax:
y_pred = F.softmax(y_pred, dim=1)
return y_pred_____no_output_____# Initialize model
model = MLP(input_dim=INPUT_DIM, hidden_dim=HIDDEN_DIM,
dropout_p=DROPOUT_P, num_classes=NUM_CLASSES)
print (model.named_parameters)<bound method Module.named_parameters of MLP(
(fc1): Linear(in_features=2, out_features=100, bias=True)
(dropout): Dropout(p=0.1, inplace=False)
(fc2): Linear(in_features=100, out_features=3, bias=True)
)>
</code>
# Overfitting_____no_output_____Though neural networks are great at capturing non-linear relationships they are highly susceptible to overfitting to the training data and failing to generalize on test data. Just take a look at the example below where we generate completely random data and are able to fit a model with [$2*N*C + D$](https://arxiv.org/abs/1611.03530) hidden units. The training performance is good (~70%) but the overfitting leads to very poor test performance. We'll be covering strategies to tackle overfitting in future lessons._____no_output_____
<code>
NUM_EPOCHS = 500
NUM_SAMPLES_PER_CLASS = 50
LEARNING_RATE = 1e-1
HIDDEN_DIM = 2 * NUM_SAMPLES_PER_CLASS * NUM_CLASSES + INPUT_DIM # 2*N*C + D_____no_output_____# Generate random data
X = np.random.rand(NUM_SAMPLES_PER_CLASS * NUM_CLASSES, INPUT_DIM)
y = np.array([[i]*NUM_SAMPLES_PER_CLASS for i in range(NUM_CLASSES)]).reshape(-1)
print ("X: ", format(np.shape(X)))
print ("y: ", format(np.shape(y)))X: (150, 2)
y: (150,)
# Create data splits
X_train, X_val, X_test, y_train, y_val, y_test = train_val_test_split(
X=X, y=y, train_size=TRAIN_SIZE)
print (f"X_train: {X_train.shape}, y_train: {y_train.shape}")
print (f"X_val: {X_val.shape}, y_val: {y_val.shape}")
print (f"X_test: {X_test.shape}, y_test: {y_test.shape}")
print (f"Sample point: {X_train[0]} → {y_train[0]}")X_train: (105, 2), y_train: (105,)
X_val: (22, 2), y_val: (22,)
X_test: (23, 2), y_test: (23,)
Sample point: [0.52553355 0.33956916] → 0
# Standardize the inputs (mean=0, std=1) using training data
X_scaler = StandardScaler().fit(X_train)
X_train = X_scaler.transform(X_train)
X_val = X_scaler.transform(X_val)
X_test = X_scaler.transform(X_test)_____no_output_____# Convert data to tensors
X_train = torch.Tensor(X_train)
y_train = torch.LongTensor(y_train)
X_val = torch.Tensor(X_val)
y_val = torch.LongTensor(y_val)
X_test = torch.Tensor(X_test)
y_test = torch.LongTensor(y_test)_____no_output_____# Initialize model
model = MLP(input_dim=INPUT_DIM, hidden_dim=HIDDEN_DIM,
dropout_p=DROPOUT_P, num_classes=NUM_CLASSES)
print (model.named_parameters)<bound method Module.named_parameters of MLP(
(fc1): Linear(in_features=2, out_features=302, bias=True)
(dropout): Dropout(p=0.1, inplace=False)
(fc2): Linear(in_features=302, out_features=3, bias=True)
)>
# Optimizer
optimizer = Adam(model.parameters(), lr=LEARNING_RATE) _____no_output_____# Training
for epoch in range(NUM_EPOCHS):
# Forward pass
y_pred = model(X_train)
# Loss
loss = loss_fn(y_pred, y_train)
# Zero all gradients
optimizer.zero_grad()
# Backward pass
loss.backward()
# Update weights
optimizer.step()
if epoch%20==0:
predictions = y_pred.max(dim=1)[1] # class
accuracy = accuracy_fn(y_pred=predictions, y_true=y_train)
print (f"Epoch: {epoch} | loss: {loss:.2f}, accuracy: {accuracy:.1f}")Epoch: 0 | loss: 1.15, accuracy: 37.1
Epoch: 20 | loss: 1.04, accuracy: 47.6
Epoch: 40 | loss: 0.98, accuracy: 51.4
Epoch: 60 | loss: 0.90, accuracy: 57.1
Epoch: 80 | loss: 0.87, accuracy: 59.0
Epoch: 100 | loss: 0.88, accuracy: 58.1
Epoch: 120 | loss: 0.84, accuracy: 64.8
Epoch: 140 | loss: 0.86, accuracy: 61.0
Epoch: 160 | loss: 0.81, accuracy: 64.8
Epoch: 180 | loss: 0.89, accuracy: 59.0
Epoch: 200 | loss: 0.91, accuracy: 60.0
Epoch: 220 | loss: 0.82, accuracy: 63.8
Epoch: 240 | loss: 0.86, accuracy: 59.0
Epoch: 260 | loss: 0.77, accuracy: 66.7
Epoch: 280 | loss: 0.82, accuracy: 67.6
Epoch: 300 | loss: 0.88, accuracy: 57.1
Epoch: 320 | loss: 0.81, accuracy: 61.9
Epoch: 340 | loss: 0.79, accuracy: 63.8
Epoch: 360 | loss: 0.80, accuracy: 61.0
Epoch: 380 | loss: 0.86, accuracy: 64.8
Epoch: 400 | loss: 0.77, accuracy: 64.8
Epoch: 420 | loss: 0.79, accuracy: 64.8
Epoch: 440 | loss: 0.81, accuracy: 65.7
Epoch: 460 | loss: 0.77, accuracy: 70.5
Epoch: 480 | loss: 0.80, accuracy: 67.6
# Predictions
y_prob = model(X_test, apply_softmax=True)
y_pred = y_prob.max(dim=1)[1]_____no_output_____# Performance report
performance = get_performance(y_true=y_test, y_pred=y_pred, classes=classes)
print (json.dumps(performance, indent=2)){
"overall": {
"precision": 0.17857142857142858,
"recall": 0.16666666666666666,
"f1": 0.1722222222222222,
"num_samples": 23.0
},
"class": {
"c1": {
"precision": 0.0,
"recall": 0.0,
"f1": 0.0,
"num_samples": 7.0
},
"c2": {
"precision": 0.2857142857142857,
"recall": 0.25,
"f1": 0.26666666666666666,
"num_samples": 8.0
},
"c3": {
"precision": 0.25,
"recall": 0.25,
"f1": 0.25,
"num_samples": 8.0
}
}
}
# Visualize the decision boundary
plt.figure(figsize=(12,5))
plt.subplot(1, 2, 1)
plt.title("Train")
plot_multiclass_decision_boundary(model=model, X=X_train, y=y_train)
plt.subplot(1, 2, 2)
plt.title("Test")
plot_multiclass_decision_boundary(model=model, X=X_test, y=y_test)
plt.show()_____no_output_____
</code>
It's important that we experiment, starting with simple models that underfit (high bias) and improving them towards a good fit. Starting with simple models (linear/logistic regression) lets us catch errors without the added complexity of more sophisticated models (neural networks). _____no_output_____<div align="left">
<img src="https://raw.githubusercontent.com/GokuMohandas/MadeWithML/main/images/basics/neural-networks/fit.png" width="700">
</div>_____no_output_____
| {
"repository": "Zanah-Tech/MadeWithML",
"path": "notebooks/08_Neural_Networks.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 578904,
"hexsha": "d0bf0258567146cb1e0e486e7cde569f4c239cf0",
"max_line_length": 578904,
"avg_line_length": 578904,
"alphanum_fraction": 0.9289174025
} |
# Notebook from isacco-v/hit-song-prediction
Path: dataset_integration_3.ipynb
#Import libraries_____no_output_____
<code>
import pandas as pd
import numpy as np
import concurrent.futures
import time
from requests.exceptions import ReadTimeout_____no_output_____!pip install -U -q PyDrive_____no_output_____from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from google.colab import auth
from oauth2client.client import GoogleCredentials_____no_output_____
</code>
#Spotify API authentication_____no_output_____
<code>
!pip uninstall spotipy_____no_output_____!pip install spotipyCollecting spotipy
Downloading https://files.pythonhosted.org/packages/fb/69/21f1ccc881438bdfa1056ea131b6ac2b1cfbe656cf3676b6167d3cbc4d69/spotipy-2.17.1-py3-none-any.whl
Collecting requests>=2.25.0
Downloading https://files.pythonhosted.org/packages/29/c1/24814557f1d22c56d50280771a17307e6bf87b70727d975fd6b2ce6b014a/requests-2.25.1-py2.py3-none-any.whl (61kB)
|████████████████████████████████| 61kB 3.5MB/s
Requirement already satisfied: six>=1.15.0 in /usr/local/lib/python3.7/dist-packages (from spotipy) (1.15.0)
Collecting urllib3>=1.26.0
Downloading https://files.pythonhosted.org/packages/09/c6/d3e3abe5b4f4f16cf0dfc9240ab7ce10c2baa0e268989a4e3ec19e90c84e/urllib3-1.26.4-py2.py3-none-any.whl (153kB)
|████████████████████████████████| 153kB 11.3MB/s
Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests>=2.25.0->spotipy) (3.0.4)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests>=2.25.0->spotipy) (2020.12.5)
Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests>=2.25.0->spotipy) (2.10)
ERROR: google-colab 1.0.0 has requirement requests~=2.23.0, but you'll have requests 2.25.1 which is incompatible.
ERROR: datascience 0.10.6 has requirement folium==0.2.1, but you'll have folium 0.8.3 which is incompatible.
Installing collected packages: urllib3, requests, spotipy
Found existing installation: urllib3 1.24.3
Uninstalling urllib3-1.24.3:
Successfully uninstalled urllib3-1.24.3
Found existing installation: requests 2.23.0
Uninstalling requests-2.23.0:
Successfully uninstalled requests-2.23.0
Successfully installed requests-2.25.1 spotipy-2.17.1 urllib3-1.26.4
# Spotify API authentication with spotipy
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
auth_manager = SpotifyClientCredentials(client_id='caf57b996b464996bff50ab59186f265', client_secret='0bfcdbff8015426cae855b56b692f69b')
sp = spotipy.Spotify(auth_manager=auth_manager, requests_timeout=10)_____no_output_____
</code>
#Import dataset_____no_output_____
<code>
# google drive authentication
auth.authenticate_user()
gauth = GoogleAuth()
gauth.credentials = GoogleCredentials.get_application_default()
drive = GoogleDrive(gauth)_____no_output_____# importo quarta parte del dataset --> 7087 datapoints
drive.CreateFile({'id':'1YAAvPUaPeBIddkuVzWgw7fhPPcT2uJTY'}).GetContentFile('billboard_dataset_unique.csv')
df_billboard = pd.read_csv("billboard_dataset_unique.csv").drop('Unnamed: 0',axis=1).iloc[21261:]
drive.CreateFile({'id':'1eOqgPk_izGXKIT5y6KfqPkmKWqBonVc0'}).GetContentFile('dataset2_X_billboard.csv')
df_songs = pd.read_csv("dataset2_X_billboard.csv").drop('Unnamed: 0',axis=1)
# df_billboard.iloc[:7087]
# df_billboard.iloc[7087:14174]
# df_billboard.iloc[14174:21261]
# df_billboard.iloc[21261:]_____no_output_____df_billboard.head()_____no_output_____df_billboard.shape_____no_output_____
</code>
#Function definitions_____no_output_____
<code>
def print_exec_time(start):
print("Esecuzione completata in %.4f secondi" % (time.perf_counter()-start))_____no_output_____# funzione che effettua ricerca con Spotify API considerando i casi in cui nel campo 'artist' siano presenti più artisti (featuring)
def search_fix(artist, title):
artist_separators = ['%%%', ' Featuring', ' featuring', ' feat.', ' Feat.', ' feat', ' Feat', ' &', ' x', ' X', ' with', ' With', ', ', '/', ' duet', ' Duet', '+', ' and']
title_separators = ['%%%', ' (']
title_fix = ["%%%", "'s", "'"]
id = None
for x in artist_separators:
for y in title_separators:
for z in title_fix:
try:
id = sp.search(q='artist:'+artist.split(x)[0]+' track:'+title.split(y)[0].replace(z, ''), type='track', limit=1)['tracks']['items'][0]['id']
except IndexError:
pass
if(id != None):
break
if(id != None):
break
if(id != None):
break
return id_____no_output_____# function that takes a single row of the Billboard dataset and returns a list with id, artist and title
# --> in case of an error the id is set to None
def get_id(row):
artist = row[1]
title = row[0]
print("fetching id for %s by %s ..." % (title, artist))
try:
try:
id = sp.search(q='artist:'+artist+' track:'+title, type='track', limit=1)['tracks']['items'][0]['id']
except IndexError:
id = search_fix(artist, title)
except ReadTimeout:
id = None
if(id == None):
print('--> [error] %s by %s' % (title, artist))
return [id, artist, title]_____no_output_____# function that, given an id, returns an array with the features (audio and other) of the corresponding track
def get_features(id):
print("fetching features for id: %s" % id)
# audio features
danceability = []
energy = []
key = []
loudness =[]
mode = []
speechiness = []
acousticness = []
instrumentalness = []
liveness = []
valence = []
tempo = []
duration_ms = []
audio_features_array = [danceability, energy, key, loudness, mode, speechiness,
acousticness, instrumentalness, liveness, valence, tempo, duration_ms]
# other features
release_date = []
explicit = []
release_date.append(sp.track(id)['album']['release_date'])
explicit.append(sp.track(id)['explicit'])
audio_features = sp.audio_features(id)[0]
try:
# remove unnecessary fields
to_remove = ['type', 'id', 'uri', 'track_href', 'analysis_url', 'time_signature']
for rmv in to_remove:
audio_features.pop(rmv)
for i, feature in enumerate(audio_features.keys()):
audio_features_array[i].append(audio_features[feature])
except AttributeError:
print("--> [error] id = %s" % id)
for i in range(12):
audio_features_array[i].append(None)
audio_features_array.append(release_date)
audio_features_array.append(explicit)
return audio_features_array_____no_output_____
</code>
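Before launching the full parallel run in the next section, the helpers can be sanity-checked on a single row (hypothetical usage; it assumes the column order expected by `get_id`, i.e. title first and artist second, and that the Spotify credentials above are valid):_____no_output_____
<code>
# quick single-row check of the helper chain (hypothetical usage)
sample_row = df_billboard.values.tolist()[0]
track_id, artist, title = get_id(sample_row)
if track_id is not None:
    sample_features = get_features(track_id)
    print(title, '-', artist, '->', track_id)_____no_output_____
</code>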
#Dataset integration_____no_output_____##Retrieve the ids for the Billboard dataset_____no_output_____
<code>
time_0 = time.perf_counter()
with concurrent.futures.ProcessPoolExecutor() as executor:
results = executor.map(get_id, df_billboard.values.tolist())
output = []
for result in results:
output.append(result)
print_exec_time(time_0)Output streaming truncated to the last 5000 lines.
fetching id for Come See About Me by Nella Dodds ...
--> [error] Ain't Doing Too Bad (Part 1) by Bobby Bland
fetching id for Silly Little Girl by The Tams ...
fetching id for Stop Takin' Me For Granted by Mary Wells ...
--> [error] Come See About Me by Nella Dodds
fetching id for I Like It by Gerry And The Pacemakers ...
fetching id for Everybody Knows (I Still Love You) by The Dave Clark Five ...
fetching id for I'm Crying by The Animals ...
fetching id for Tobacco Road by The Nashville Teens ...
fetching id for Dancing In The Street by Martha & The Vandellas ...
fetching id for Bless Our Love by Gene Chandler ...
--> [error] Stop Takin' Me For Granted by Mary Wells
fetching id for Wendy by The Beach Boys ...
fetching id for Runnin' Out Of Fools by Aretha Franklin ...
fetching id for Jump Back by Rufus Thomas ...
fetching id for Teen Beat '65 by Sandy Nelson ...
fetching id for When You're Young And In Love by Ruby And The Romantics ...
fetching id for Hey Now by Lesley Gore ...
--> [error] Bless Our Love by Gene Chandler
fetching id for Beautician Blues by B.B. King ...
fetching id for The Dodo by Jumpin' Gene Simmons ...
fetching id for Sometimes I Wish I Were A Boy by Lesley Gore ...
fetching id for I've Got The Skill by Jackie Ross ...
--> [error] The Dodo by Jumpin' Gene Simmons
fetching id for Never Trust A Woman by B.B. King ...
fetching id for High Heel Sneakers by Jerry Lee Lewis ...
fetching id for Why (Doncha Be My Girl) by The Chartbusters ...
--> [error] I've Got The Skill by Jackie Ross
fetching id for A Summer Song by Chad & Jeremy ...
fetching id for Baby Don't You Do It by Marvin Gaye ...
fetching id for Softly, As I Leave You by Frank Sinatra ...
fetching id for I Don't Want To See Tomorrow by Nat King Cole ...
fetching id for Something You Got by Ramsey Lewis Trio ...
fetching id for Lumberjack by Brook Benton ...
fetching id for Little Honda by The Beach Boys ...
fetching id for Look Away by Garnet Mimms ...
fetching id for That's Where It's At by Sam Cooke ...
fetching id for Times Have Changed by Irma Thomas ...
fetching id for When I Grow Up (To Be A Man) by The Beach Boys ...
fetching id for Ride The Wild Surf by Jan & Dean ...
fetching id for You Must Believe Me by The Impressions ...
fetching id for I've Got Sand In My Shoes by The Drifters ...
fetching id for Mercy, Mercy by Don Covay & The Goodtimers ...
fetching id for All Cried Out by Dusty Springfield ...
fetching id for Death Of An Angel by The Kingsmen ...
fetching id for Funny Girl by Barbra Streisand ...
fetching id for Smack Dab In The Middle by Ray Charles and his Orchestra ...
fetching id for Baby Be Mine by The Jelly Beans ...
--> [error] Smack Dab In The Middle by Ray Charles and his Orchestra
fetching id for So Long Dearie by Louis Armstrong ...
fetching id for Up Above My Head (I Hear Music In The Air) by Al (He's the King) Hirt ...
--> [error] Baby Be Mine by The Jelly Beans
fetching id for Maybe Tonight by The Shirelles ...
fetching id for Heartbreak Hill by Fats Domino ...
--> [error] Up Above My Head (I Hear Music In The Air) by Al (He's the King) Hirt
fetching id for It Hurts To Be In Love by Gene Pitney ...
fetching id for Baby I Need Your Loving by Four Tops ...
fetching id for Remember (Walkin' in the Sand) by The Shangri-Las ...
fetching id for Bread And Butter by The Newbeats ...
fetching id for On The Street Where You Live by Andy Williams ...
fetching id for Girl (Why You Wanna Make Me Blue) by The Temptations ...
--> [error] Heartbreak Hill by Fats Domino
fetching id for Funny (How Time Slips Away) by Joe Hinton ...
fetching id for 20-75 by Willie Mitchell ...
fetching id for Do You Want To Dance by Del Shannon ...
fetching id for I Wanna Be With You by Nancy Wilson ...
--> [error] Do You Want To Dance by Del Shannon
fetching id for La La La La La by The Blendells ...
--> [error] I Wanna Be With You by Nancy Wilson
fetching id for Baby Baby All The Time by The Superbs ...
fetching id for Garden In The Rain by Vic Dana ...
fetching id for I'm On The Outside (Looking In) by Little Anthony And The Imperials ...
fetching id for G.T.O. by Ronny And The Daytonas ...
fetching id for That's What Love Is Made Of by The Miracles ...
fetching id for Rhythm by Major Lance ...
fetching id for From A Window by Billy J. Kramer With The Dakotas ...
fetching id for Matchbox by The Beatles ...
fetching id for Good Night Baby by The Butterflys ...
fetching id for The James Bond Theme by Billy Strange ...
fetching id for Beach Girl by Pat Boone ...
--> [error] Good Night Baby by The Butterflys
fetching id for Little Queenie by Bill Black's Combo ...
fetching id for Yes I Do by Solomon Burke ...
fetching id for Haunted House by Jumpin' Gene Simmons ...
fetching id for House Of The Rising Sun by The Animals ...
fetching id for Slow Down by The Beatles ...
fetching id for Save It For Me by The 4 Seasons Featuring the "Sound of Frankie Valli" ...
fetching id for Out Of Sight by James Brown And His Orchestra ...
--> [error] Beach Girl by Pat Boone
fetching id for Try Me by Jimmy Hughes ...
fetching id for Pearly Shells (Popo O Ewa) by Burl Ives ...
fetching id for I Wouldn't Trade You For The World by The Bachelors ...
fetching id for The Anaheim, Azusa & Cucamonga Sewing Circle, Book Review And Timing Associ by Jan & Dean ...
fetching id for L-O-V-E by Nat King Cole ...
fetching id for I See You by Cathy & Joe ...
fetching id for The Things In This House by Bobby Darin ...
--> [error] Out Of Sight by James Brown And His Orchestra
fetching id for The Dartell Stomp by The Mustangs ...
--> [error] The Things In This House by Bobby Darin
fetching id for Don't Spread It Around by Barbara Lynn ...
fetching id for I Can't Believe What You Say (For Seeing What You Do) by Ike & Tina Turner ...
fetching id for Where Did Our Love Go by The Supremes ...
fetching id for You'll Never Get To Heaven (If You Break My Heart) by Dionne Warwick ...
fetching id for Maybelline by Johnny Rivers ...
fetching id for A Hard Day's Night by The Beatles ...
fetching id for Michael by Trini Lopez ...
fetching id for She Wants T' Swim by Chubby Checker ...
fetching id for The Cat by Jimmy Smith ...
fetching id for It's All Over by Ben E. King ...
--> [error] The Dartell Stomp by The Mustangs
fetching id for Somebody New by Chuck Jackson ...
fetching id for I Guess I'm Crazy by Jim Reeves ...
fetching id for Just A Moment Ago by Soul Sisters ...
--> [error] Somebody New by Chuck Jackson
fetching id for Everybody Loves Somebody by Dean Martin ...
fetching id for Because by The Dave Clark Five ...
--> [error] Just A Moment Ago by Soul Sisters
fetching id for Always Together by Al Martino ...
fetching id for In The Misty Moonlight by Jerry Wallace ...
fetching id for Selfish One by Jackie Ross ...
fetching id for Some Day We're Gonna Love Again by The Searchers ...
fetching id for (There's) Always Something There To Remind Me by Lou Johnson ...
fetching id for Candy To Me by Eddie Holland ...
fetching id for Knock! Knock! (Who's There?) by The Orlons ...
fetching id for Yet...I Know (Et Pourtant) by Steve Lawrence ...
--> [error] Some Day We're Gonna Love Again by The Searchers
fetching id for It's For You by Cilla Black ...
--> [error] Yet...I Know (Et Pourtant) by Steve Lawrence
fetching id for Till The End Of Time by The Ray Charles Singers ...
fetching id for Soon I'll Wed My Love by John Gary ...
fetching id for Clinging Vine by Bobby Vinton ...
fetching id for C'mon And Swim by Bobby Freeman ...
fetching id for It's All Over Now by The Rolling Stones ...
fetching id for Under The Boardwalk by The Drifters ...
fetching id for Say You by Ronnie Dove ...
fetching id for Maybe I Know by Lesley Gore ...
fetching id for He's In Town by The Tokens ...
fetching id for You Never Can Tell by Chuck Berry ...
fetching id for There's Nothing I Can Say by Rick Nelson ...
fetching id for Gonna Send You Back To Walker (Gonna Send You Back To Georgia) by The Animals ...
fetching id for If I Fell by The Beatles ...
fetching id for I've Got No Time To Lose by Carla Thomas ...
fetching id for Hold Me by P.J. Proby ...
fetching id for Society Girl by The Rag Dolls ...
fetching id for Gator Tails And Monkey Ribs by The Spats Featuring Dick Johnson ...
--> [error] Till The End Of Time by The Ray Charles Singers
fetching id for Sally Was A Good Old Girl by Fats Domino ...
--> [error] Gator Tails And Monkey Ribs by The Spats Featuring Dick Johnson
fetching id for And I Love Her by The Beatles ...
fetching id for How Do You Do It? by Gerry And The Pacemakers ...
fetching id for Walk-Don't Run '64 by The Ventures ...
fetching id for Sweet William by Millie Small ...
fetching id for Worry by Johnny Tillotson ...
fetching id for When You Loved Me by Brenda Lee ...
--> [error] Sally Was A Good Old Girl by Fats Domino
fetching id for Ringo's Theme (This Boy) by George Martin And His Orch. ...
--> [error] When You Loved Me by Brenda Lee
fetching id for Invisible Tears by The Ray Conniff Singers ...
--> [error] Ringo's Theme (This Boy) by George Martin And His Orch.
fetching id for Where Love Has Gone by Jack Jones ...
fetching id for I Stand Accused by Jerry Butler ...
fetching id for Sincerely by The 4 Seasons ...
fetching id for Lovers Always Forgive by Gladys Knight And The Pips ...
fetching id for I Wanna Thank You by The Enchanters ...
fetching id for Rockin' Robin by The Rivieras ...
fetching id for Someone, Someone by Brian Poole And The Tremeloes ...
fetching id for The Clock by Baby Washington ...
fetching id for Wishin' And Hopin' by Dusty Springfield ...
fetching id for People Say by The Dixie Cups ...
fetching id for Such A Night by Elvis Presley With The Jordanaires ...
--> [error] Invisible Tears by The Ray Conniff Singers
fetching id for Just Be True by Gene Chandler ...
fetching id for I'll Cry Instead by The Beatles ...
fetching id for Ain't She Sweet by The Beatles ...
fetching id for I Want You To Meet My Baby by Eydie Gorme ...
fetching id for Johnny B. Goode by Dion Di Muci ...
--> [error] I Want You To Meet My Baby by Eydie Gorme
fetching id for Me Japanese Boy I Love You by Bobby Goldsboro ...
--> [error] Johnny B. Goode by Dion Di Muci
fetching id for A Quiet Place by Garnet Mimms & The Enchanters ...
fetching id for Can't Get Over (The Bossa Nova) by Eydie Gorme ...
--> [error] Me Japanese Boy I Love You by Bobby Goldsboro
fetching id for One More Tear by The Raindrops ...
--> [error] Can't Get Over (The Bossa Nova) by Eydie Gorme
fetching id for Rag Doll by The 4 Seasons Featuring the "Sound of Frankie Valli" ...
fetching id for Steal Away by Jimmy Hughes ...
fetching id for The Little Old Lady (From Pasadena) by Jan & Dean ...
fetching id for Handy Man by Del Shannon ...
fetching id for Hey Girl Don't Bother Me by The Tams ...
fetching id for I Wanna Love Him So Bad by The Jelly Beans ...
fetching id for (You Don't Know) How Glad I Am by Nancy Wilson ...
fetching id for Tell Me (You're Coming Back) by The Rolling Stones ...
fetching id for I'll Keep You Satisfied by Billy J. Kramer With The Dakotas ...
--> [error] One More Tear by The Raindrops
fetching id for Little Latin Lupe Lu by The Kingsmen ...
fetching id for I'll Always Love You by Brenda Holloway ...
fetching id for Everybody Needs Somebody To Love by Solomon Burke ...
fetching id for If I'm A Fool For Loving You by Bobby Wood ...
fetching id for A House Is Not A Home by Dionne Warwick ...
fetching id for I Don't Care (Just As Long As You Love Me) by Buck Owens ...
fetching id for A Taste Of Honey by Tony Bennett ...
fetching id for Lover's Prayer by Wallace Brothers ...
fetching id for I Get Around by The Beach Boys ...
fetching id for She's The One by The Chartbusters ...
fetching id for Keep On Pushing by The Impressions ...
fetching id for Mixed-Up, Shook-Up, Girl by Patty & The Emblems ...
fetching id for Angelito by Rene & Rene ...
fetching id for A Tear Fell by Ray Charles ...
fetching id for Looking For Love by Connie Francis ...
fetching id for Hello Mudduh, Hello Fadduh! (A Letter From Camp) (New 1964 Version) by Allan Sherman ...
fetching id for A House Is Not A Home by Brook Benton ...
fetching id for Thank You Baby by The Shirelles ...
fetching id for Frankie And Johnny by The Greenwood County Singers ...
--> [error] A Tear Fell by Ray Charles
fetching id for Father Sebastian by The Ramblers ...
--> [error] Frankie And Johnny by The Greenwood County Singers
fetching id for Squeeze Her-Tease Her (But Love Her) by Jackie Wilson ...
fetching id for Dang Me by Roger Miller ...
fetching id for Sugar Lips by Al (He's the King) Hirt ...
--> [error] Father Sebastian by The Ramblers
fetching id for The Girl From Ipanema by Stan Getz/Astrud Gilberto ...
--> [error] Sugar Lips by Al (He's the King) Hirt
fetching id for Nobody I Know by Peter And Gordon ...
fetching id for I Like It Like That by The Miracles ...
fetching id for Love Is All We Need by Vic Dana ...
fetching id for No One To Cry To by Ray Charles ...
fetching id for Summer Means Fun by Bruce & Terry ...
fetching id for Soul Dressing by Booker T. & The MG's ...
fetching id for I Can't Get You Out Of My Heart by Al Martino ...
fetching id for Memphis by Johnny Rivers ...
fetching id for Al-Di-La by The Ray Charles Singers ...
--> [error] No One To Cry To by Ray Charles
fetching id for Can't You See That She's Mine by The Dave Clark Five ...
fetching id for You're My World by Cilla Black ...
fetching id for Try It Baby by Marvin Gaye ...
fetching id for I Believe by The Bachelors ...
fetching id for Farmer John by The Premiers ...
fetching id for Do I Love You? by The Ronettes ...
fetching id for I'm Into Somethin' Good by Earl-Jean ...
fetching id for I Should Have Known Better by The Beatles ...
fetching id for Sole Sole Sole by Siw Malmkvist-Umberto Marcato ...
--> [error] Al-Di-La by The Ray Charles Singers
fetching id for You're My Remedy by The Marvelettes ...
fetching id for Sailor Boy by The Chiffons ...
fetching id for Silly Ol' Summertime by The New Christy Minstrels ...
--> [error] Sole Sole Sole by Siw Malmkvist-Umberto Marcato
fetching id for It's A Cotton Candy World by Jerry Wallace ...
--> [error] Silly Ol' Summertime by The New Christy Minstrels
fetching id for People by Barbra Streisand ...
fetching id for Good Times by Sam Cooke ...
fetching id for Don't Throw Your Love Away by The Searchers ...
fetching id for My Boy Lollipop by Millie Small ...
fetching id for Don't Let The Sun Catch You Crying by Gerry And The Pacemakers ...
fetching id for Anyone Who Knows What Love Is (will understand) by Irma Thomas ...
fetching id for Share Your Love With Me by Bobby Bland ...
fetching id for I Want To Hold Your Hand by Boston Pops Orchestra Arthur Fiedler ...
--> [error] It's A Cotton Candy World by Jerry Wallace
fetching id for Oh! Baby (We Got A Good Thing Goin') by Barbara Lynn ...
fetching id for Baby Come Home by Ruby And The Romantics ...
fetching id for Bama Lama Bama Loo by Little Richard ...
fetching id for Sunny by Neil Sedaka ...
fetching id for One Piece Topless Bathing Suit by The Rip Chords ...
fetching id for You're No Good by The Swinging Blue Jeans ...
fetching id for Don't Worry Baby by The Beach Boys ...
fetching id for Bad To Me by Billy J. Kramer With The Dakotas ...
--> [error] I Want To Hold Your Hand by Boston Pops Orchestra Arthur Fiedler
fetching id for Hey Harmonica Man by Stevie Wonder ...
fetching id for Alone by The 4 Seasons ...
fetching id for What Have I Got Of My Own by Trini Lopez ...
fetching id for Tennessee Waltz by Sam Cooke ...
fetching id for No Particular Place To Go by Chuck Berry ...
fetching id for I Still Get Jealous by Louis Armstrong And The All Stars ...
fetching id for Remember Me by Rita Pavone ...
fetching id for Girls by Major Lance ...
fetching id for I'm Happy Just To Dance With You by The Beatles ...
fetching id for All Grown Up by The Crystals ...
fetching id for Bachelor Boy by Cliff Richard And The Shadows ...
fetching id for A World Without Love by Peter And Gordon ...
fetching id for Chapel Of Love by The Dixie Cups ...
fetching id for I'll Be In Trouble by The Temptations ...
fetching id for Little Children by Billy J. Kramer With The Dakotas ...
--> [error] I Still Get Jealous by Louis Armstrong And The All Stars
fetching id for The World I Used To Know by Jimmie Rodgers ...
fetching id for Not Fade Away by The Rolling Stones ...
fetching id for Something You Got by Alvin Robinson ...
fetching id for Beg Me by Chuck Jackson ...
fetching id for The First Night Of The Full Moon by Jack Jones ...
--> [error] Something You Got by Alvin Robinson
fetching id for Peg O' My Heart by Robert Maxwell His Harp And Orchestra ...
--> [error] The First Night Of The Full Moon by Jack Jones
fetching id for I Can't Hear You by Betty Everett ...
fetching id for The Ferris Wheel by The Everly Brothers ...
fetching id for It Ain't No Use by Major Lance ...
fetching id for The Mexican Shuffle by Herb Alpert's Tijuana Brass ...
--> [error] Peg O' My Heart by Robert Maxwell His Harp And Orchestra
fetching id for A Shot In The Dark by Henry Mancini And His Orchestra ...
--> [error] The Mexican Shuffle by Herb Alpert's Tijuana Brass
fetching id for Love Me With All Your Heart (Cuando Calienta El Sol) by The Ray Charles Singers ...
--> [error] A Shot In The Dark by Henry Mancini And His Orchestra
fetching id for Yesterday's Gone by Chad & Jeremy ...
fetching id for Walk On By by Dionne Warwick ...
fetching id for What's The Matter With You Baby by Marvin Gaye & Mary Wells ...
--> [error] Love Me With All Your Heart (Cuando Calienta El Sol) by The Ray Charles Singers
fetching id for Lazy Elsie Molly by Chubby Checker ...
fetching id for Alone With You by Brenda Lee ...
fetching id for Kick That Little Foot Sally Ann by Round Robin ...
--> [error] Alone With You by Brenda Lee
fetching id for Don't Take Your Love From Me by Gloria Lynne ...
fetching id for Hickory, Dick And Doc by Bobby Vee ...
fetching id for It Will Stand by The Showmen ...
fetching id for I'm The One by Gerry And The Pacemakers ...
fetching id for She's My Girl by Bobby Shafto ...
--> [error] Kick That Little Foot Sally Ann by Round Robin
fetching id for Love Me Do by The Beatles ...
fetching id for Today by The New Christy Minstrels ...
fetching id for Beans In My Ears by The Serendipity Singers ...
fetching id for I'll Touch A Star by Terry Stafford ...
fetching id for Diane by The Bachelors ...
fetching id for Hello, Dolly! by Louis Armstrong And The All Stars ...
--> [error] She's My Girl by Bobby Shafto
fetching id for My Guy by Mary Wells ...
fetching id for Tell Me Why by Bobby Vinton ...
fetching id for Giving Up by Gladys Knight And The Pips ...
fetching id for Milord by Bobby Darin ...
fetching id for Party Girl by Bernadette Carroll ...
fetching id for Just Ain't Enough Love by Eddie Holland ...
fetching id for A Little Toy Balloon by Danny Williams ...
--> [error] Hello, Dolly! by Louis Armstrong And The All Stars
fetching id for If You See My Love by Lenny Welch ...
fetching id for Oh, Rock My Soul (Part I) by Peter, Paul & Mary ...
fetching id for My Heart Skips A Beat by Buck Owens ...
fetching id for Like Columbus Did by The Reflections ...
fetching id for Jamaica Ska by The Ska Kings ...
--> [error] A Little Toy Balloon by Danny Williams
fetching id for It's A Sin To Tell A Lie by Tony Bennett ...
fetching id for Tears And Roses by Al Martino ...
fetching id for Do You Love Me by The Dave Clark Five ...
fetching id for Every Little Bit Hurts by Brenda Holloway ...
fetching id for My Baby Don't Dig Me by Ray Charles and his Orchestra ...
--> [error] Jamaica Ska by The Ska Kings
fetching id for The French Song by Lucille Starr ...
--> [error] My Baby Don't Dig Me by Ray Charles and his Orchestra
fetching id for The World Of Lonely People by Anita Bryant ...
--> [error] The French Song by Lucille Starr
fetching id for Yesterday's Gone by The Overlanders ...
fetching id for Taste Of Tears by Johnny Mathis ...
fetching id for Dream Lover by The Paris Sisters ...
--> [error] The World Of Lonely People by Anita Bryant
fetching id for Viva Las Vegas by Elvis Presley ...
fetching id for All My Loving by The Hollyridge Strings ...
--> [error] Dream Lover by The Paris Sisters
fetching id for It's All Over Now by The Valentinos ...
fetching id for I Don't Want To Hear Anymore by Jerry Butler ...
fetching id for The Things That I Used To Do by James Brown And His Orchestra ...
--> [error] All My Loving by The Hollyridge Strings
fetching id for Once Upon A Time by Marvin Gaye & Mary Wells ...
--> [error] The Things That I Used To Do by James Brown And His Orchestra
fetching id for What'd I Say by Elvis Presley With The Jubilee Four And Carole Lombard Quartet ...
fetching id for (Just Like) Romeo & Juliet by The Reflections ...
fetching id for P.S. I Love You by The Beatles ...
fetching id for Rock Me Baby by B.B. King ...
fetching id for Be Anything (But Be Mine) by Connie Francis ...
fetching id for Cotton Candy by Al Hirt ...
fetching id for Another Cup Of Coffee by Brook Benton ...
fetching id for Too Late To Turn Back Now by Brook Benton ...
fetching id for Everybody Knows by Steve Lawrence ...
fetching id for I Wanna Be Loved by Dean And Jean ...
fetching id for Four By The Beatles by The Beatles ...
fetching id for Sie Liebt Dich (She Loves You) by Die Beatles ...
--> [error] Everybody Knows by Steve Lawrence
fetching id for Three Window Coupe by The Rip Chords ...
fetching id for I Don't Want To Be Hurt Anymore by Nat King Cole ...
fetching id for I Don't Wanna Be A Loser by Lesley Gore ...
fetching id for It's Over by Roy Orbison ...
fetching id for Viva Las Vegas by Elvis Presley With The Jordanaires ...
--> [error] Sie Liebt Dich (She Loves You) by Die Beatles
fetching id for I Rise, I Fall by Johnny Tillotson ...
fetching id for Good Golly Miss Molly by The Swinging Blue Jeans ...
fetching id for Goodbye Baby (Baby Goodbye) by Solomon Burke ...
fetching id for Sugar And Spice by The Searchers ...
fetching id for Hurt By Love by Inez Foxx ...
fetching id for Donnie by The Bermudas ...
fetching id for Rules Of Love by The Orlons ...
fetching id for Be My Girl by The Four-Evers ...
fetching id for A World Without Love by Bobby Rydell ...
fetching id for My Dreams by Brenda Lee ...
--> [error] Be My Girl by The Four-Evers
fetching id for Night Time Is The Right Time by Rufus & Carla ...
fetching id for Bits And Pieces by The Dave Clark Five ...
fetching id for I'm So Proud by The Impressions ...
--> [error] My Dreams by Brenda Lee
fetching id for Ronnie by The 4 Seasons Featuring the "Sound of Frankie Valli" ...
fetching id for Wish Someone Would Care by Irma Thomas ...
fetching id for Wrong For Each Other by Andy Williams ...
fetching id for Tall Cool One by The Wailers ...
fetching id for Gonna Get Along Without You Now by Skeeter Davis ...
fetching id for Gonna' Get Along Without You Now by Tracey Dey ...
--> [error] Wrong For Each Other by Andy Williams
fetching id for I Knew It All The Time by The Dave Clark Five ...
--> [error] Gonna' Get Along Without You Now by Tracey Dey
fetching id for One Way Love by The Drifters ...
fetching id for Kiko by Jimmy McGriff ...
fetching id for That's Really Some Good by Rufus & Carla ...
fetching id for Good Time Tonight by The Soul Sisters ...
fetching id for The Magic Of Our Summer Love by The Tymes ...
fetching id for Dead Man's Curve by Jan & Dean ...
fetching id for Do You Want To Know A Secret by The Beatles ...
fetching id for White On White by Danny Williams ...
fetching id for The Very Thought Of You by Rick Nelson ...
fetching id for Shangri-La by Robert Maxwell His Harp And Orchestra ...
--> [error] I Knew It All The Time by The Dave Clark Five
fetching id for Kiss Me Quick by Elvis Presley With The Jordanaires ...
--> [error] Shangri-La by Robert Maxwell His Harp And Orchestra
fetching id for Whenever He Holds You by Bobby Goldsboro ...
fetching id for One Girl by Garnet Mimms ...
fetching id for Yesterday's Hero by Gene Pitney ...
fetching id for Trouble I've Had by Clarence Ashe ...
fetching id for Don't Let The Rain Come Down (Crooked Little Man) by The Serendipity Singers ...
fetching id for Suspicion by Terry Stafford ...
fetching id for Can't Buy Me Love by The Beatles ...
fetching id for Shangri-La by Vic Dana ...
fetching id for Tea For Two by Nino Tempo & April Stevens ...
fetching id for Soul Serenade by King Curtis ...
fetching id for The Loneliest Night by Dale & Grace ...
fetching id for Carol by Tommy Roe ...
fetching id for Winkin', Blinkin' And Nod by The Simon Sisters ...
fetching id for From Russia With Love by The Village Stompers ...
fetching id for Tell Me Mamma by Christine Quaite ...
fetching id for Tequila by Bill Black's Combo ...
fetching id for Across The Street by Lenny O'Henry ...
fetching id for Let's Have A Party by The Rivieras ...
fetching id for Money by The Kingsmen ...
fetching id for My Girl Sloopy by The Vibrations ...
--> [error] Tell Me Mamma by Christine Quaite
fetching id for The Shoop Shoop Song (It's In His Kiss) by Betty Everett ...
fetching id for The Pink Panther Theme by Henry Mancini And His Orchestra ...
--> [error] My Girl Sloopy by The Vibrations
fetching id for (The Best Part Of) Breakin' Up by The Ronettes ...
fetching id for Kiss Me Sailor by Diane Renay ...
fetching id for That's The Way Boys Are by Lesley Gore ...
fetching id for Ain't That Just Like Me by The Searchers ...
fetching id for Loving You More Every Day by Etta James ...
--> [error] The Pink Panther Theme by Henry Mancini And His Orchestra
fetching id for Who's Afraid Of Virginia Woolf? (Part I) by Jimmy Smith ...
fetching id for Big Boss Line by Jackie Wilson ...
fetching id for Long Tall Shorty by Tommy Tucker ...
--> [error] Loving You More Every Day by Etta James
fetching id for Security by Otis Redding ...
fetching id for Glad All Over by The Dave Clark Five ...
fetching id for The Matador by Major Lance ...
fetching id for Charade by Sammy Kaye And His Orchestra ...
--> [error] Long Tall Shorty by Tommy Tucker
fetching id for Ebb Tide by Lenny Welch ...
fetching id for Thank You Girl by The Beatles ...
fetching id for You're A Wonderful One by Marvin Gaye ...
fetching id for Forever by Pete Drake And His Talking Steel Guitar ...
--> [error] Charade by Sammy Kaye And His Orchestra
fetching id for Slip-In Mules (No High Heel Sneakers) by Sugar Pie DeSanto ...
fetching id for In My Lonely Room by Martha & The Vandellas ...
fetching id for Look Homeward Angel by The Monarchs ...
--> [error] Forever by Pete Drake And His Talking Steel Guitar
fetching id for That's When It Hurts by Ben E. King ...
fetching id for Hey, Mr. Sax Man by Boots Randolph ...
fetching id for Little Donna by The Rivieras ...
fetching id for Have I Stayed Away Too Long by Bobby Bare ...
fetching id for The Way You Do The Things You Do by The Temptations ...
--> [error] Look Homeward Angel by The Monarchs
fetching id for Needles And Pins by The Searchers ...
fetching id for Stay Awhile by Dusty Springfield ...
fetching id for Can You Do It by The Contours ...
fetching id for Hey, Bobba Needle by Chubby Checker ...
fetching id for Nadine (Is It You?) by Chuck Berry ...
fetching id for The New Girl In School by Jan & Dean ...
fetching id for Baby Baby Baby by Anna King-Bobby Byrd ...
fetching id for Giving Up On Love by Jerry Butler ...
fetching id for The Wonder Of You by Ray Peterson ...
fetching id for Soul Hootenanny (Pt. I) by Gene Chandler ...
--> [error] Baby Baby Baby by Anna King-Bobby Byrd
fetching id for Caldonia by James Brown And His Orchestra ...
--> [error] Soul Hootenanny (Pt. I) by Gene Chandler
fetching id for Big Party by Barbara & The Browns ...
fetching id for The Little White Cloud That Cried by Wayne Newton ...
--> [error] Caldonia by James Brown And His Orchestra
fetching id for Ain't Nothing You Can Do by Bobby Bland ...
--> [error] The Little White Cloud That Cried by Wayne Newton
fetching id for She Loves You by The Beatles ...
fetching id for Think by Brenda Lee ...
fetching id for Make Me Forget by Bobby Rydell ...
fetching id for All My Loving by The Beatles ...
fetching id for T'ain't Nothin' To Me by The Coasters ...
fetching id for I Should Care by Gloria Lynne ...
fetching id for Come To Me by Otis Redding ...
fetching id for Little Boxes by The Womenfolk ...
fetching id for Gee by The Pixies Three ...
fetching id for Where Does Love Go by Freddie Scott ...
--> [error] Little Boxes by The Womenfolk
fetching id for I Want To Hold Your Hand by The Beatles ...
fetching id for Please Please Me by The Beatles ...
--> [error] Where Does Love Go by Freddie Scott
fetching id for My Heart Belongs To Only You by Bobby Vinton ...
fetching id for Stay by The 4 Seasons ...
fetching id for Hippy Hippy Shake by The Swinging Blue Jeans ...
fetching id for Fun, Fun, Fun by The Beach Boys ...
fetching id for Dawn (Go Away) by The 4 Seasons ...
fetching id for I Can't Stand It by Soul Sisters ...
fetching id for Ain't Gonna Tell Anybody by Jimmy Gilmer And The Fireballs ...
fetching id for You Can't Do That by The Beatles ...
fetching id for Castles In The Sand by Little Stevie Wonder ...
--> [error] Ain't Gonna Tell Anybody by Jimmy Gilmer And The Fireballs
fetching id for Book Of Love by The Raindrops ...
--> [error] Castles In The Sand by Little Stevie Wonder
fetching id for Our Everlasting Love by Ruby And The Romantics ...
fetching id for Mexican Drummer Man by Herb Alpert's Tijuana Brass ...
--> [error] Book Of Love by The Raindrops
fetching id for Be Anything (But Be Mine) by Gloria Lynne ...
--> [error] Mexican Drummer Man by Herb Alpert's Tijuana Brass
fetching id for When Joanna Loved Me by Tony Bennett ...
fetching id for I'm The Lonely One by Cliff Richard ...
fetching id for I'm Confessin' (That I Love You) by Nino Tempo & April Stevens ...
fetching id for Hi-Heel Sneakers by Tommy Tucker ...
fetching id for Tell It On The Mountain by Peter, Paul & Mary ...
fetching id for Java by Al (He's the King) Hirt ...
--> [error] Be Anything (But Be Mine) by Gloria Lynne
fetching id for Kissin' Cousins by Elvis Presley With The Jordanaires ...
--> [error] Java by Al (He's the King) Hirt
fetching id for We Love You Beatles by The Carefrees ...
fetching id for I Saw Her Standing There by The Beatles ...
fetching id for You Lied To Your Daddy by The Tams ...
fetching id for It's All Right (You're Just In Love) by The Tams ...
fetching id for Somebody Stole My Dog by Rufus Thomas ...
fetching id for Why by The Beatles With Tony Sheridan ...
--> [error] We Love You Beatles by The Carefrees
fetching id for Vanishing Point by The Marketts ...
fetching id for High On A Hill by Scott English ...
fetching id for Blue Winter by Connie Francis ...
fetching id for It Hurts Me by Elvis Presley With The Jordanaires ...
fetching id for Navy Blue by Diane Renay ...
fetching id for Rip Van Winkle by The Devotions ...
fetching id for I Love You More And More Every Day by Al Martino ...
--> [error] Vanishing Point by The Marketts
fetching id for My Heart Cries For You by Ray Charles ...
fetching id for Hey Jean, Hey Dean by Dean And Jean ...
fetching id for From Me To You by The Beatles ...
fetching id for I'll Make You Mine by Bobby Vee With The Eligibles ...
fetching id for Congratulations by Rick Nelson ...
fetching id for There's A Place by The Beatles ...
fetching id for Sha-La-La by The Shirelles ...
fetching id for Roll Over Beethoven by The Beatles ...
fetching id for Hand It Over by Chuck Jackson ...
fetching id for I'm On Fire by Jerry Lee Lewis ...
fetching id for People by Nat King Cole ...
fetching id for See The Funny Little Clown by Bobby Goldsboro ...
--> [error] My Heart Cries For You by Ray Charles
fetching id for Understand Your Man by Johnny Cash ...
fetching id for Penetration by The Pyramids ...
fetching id for Baby, Don't You Cry (The New Swingova Rhythm) by Ray Charles and his Orchestra ...
fetching id for Worried Guy by Johnny Tillotson ...
fetching id for He's A Good Guy (Yes He Is) by The Marvelettes ...
fetching id for (You Can't Let The Boy Overpower) The Man In You by The Miracles ...
fetching id for Love With The Proper Stranger by Jack Jones ...
fetching id for To Each His Own by The Tymes ...
fetching id for A Letter To The Beatles by The Four Preps ...
fetching id for I Can't Wait Until I See My Baby by Justine Washington ...
--> [error] Baby, Don't You Cry (The New Swingova Rhythm) by Ray Charles and his Orchestra
fetching id for How Blue Can You Get by B.B. King And His Orchestra ...
--> [error] I Can't Wait Until I See My Baby by Justine Washington
fetching id for California Sun by The Rivieras ...
fetching id for Good News by Sam Cooke ...
fetching id for I Only Want To Be With You by Dusty Springfield ...
fetching id for Stardust by Nino Tempo & April Stevens ...
fetching id for Oh Baby Don't You Weep (Part 1) by James Brown And The Famous Flames ...
--> [error] How Blue Can You Get by B.B. King And His Orchestra
fetching id for The Shelter Of Your Arms by Sammy Davis Jr. ...
fetching id for Who Do You Love by The Sapphires ...
fetching id for I Wish You Love by Gloria Lynne ...
fetching id for Puppy Love by Barbara Lewis ...
fetching id for Can Your Monkey Do The Dog by Rufus Thomas ...
fetching id for He'll Have To Go by Solomon Burke ...
fetching id for Young And In Love by Chris Crosby ...
fetching id for My True Carrie, Love by Nat King Cole ...
--> [error] Oh Baby Don't You Weep (Part 1) by James Brown And The Famous Flames
fetching id for Long Gone Lonesome Blues by Hank Williams Jr. ...
fetching id for (That's) What The Nitty Gritty Is by Shirley Ellis ...
fetching id for Tell Me Baby by Garnet Mimms ...
fetching id for Jailer, Bring Me Water by Trini Lopez ...
fetching id for Abigail Beecher by Freddy Cannon ...
fetching id for Stop And Think It Over by Dale & Grace ...
fetching id for Bird Dance Beat by The Trashmen ...
fetching id for My Bonnie (My Bonnie Lies Over The Ocean) by The Beatles With Tony Sheridan ...
fetching id for Miller's Cave by Bobby Bare ...
fetching id for You Don't Own Me by Lesley Gore ...
fetching id for Live Wire by Martha & The Vandellas ...
fetching id for Out Of Sight - Out Of Mind by Sunny & The Sunliners ...
--> [error] My True Carrie, Love by Nat King Cole
fetching id for Always In My Heart by Los Indios Tabajaras ...
fetching id for My Boyfriend Got A Beatle Haircut by Donna Lynn ...
--> [error] Out Of Sight - Out Of Mind by Sunny & The Sunliners
fetching id for Searchin' by Ace Cannon ...
fetching id for The Boy With The Beatle Hair by The Swans ...
fetching id for Lazy Lady by Fats Domino ...
--> [error] My Boyfriend Got A Beatle Haircut by Donna Lynn
fetching id for Stockholm by Lawrence Welk And His Orchestra ...
--> [error] Lazy Lady by Fats Domino
fetching id for Run, Run, Run by The Supremes ...
fetching id for I Wonder Who's Kissing Her Now by Bobby Darin ...
fetching id for Um, Um, Um, Um, Um, Um by Major Lance ...
fetching id for A Fool Never Learns by Andy Williams ...
--> [error] Stockholm by Lawrence Welk And His Orchestra
fetching id for What Kind Of Fool (Do You Think I Am) by The Tams ...
fetching id for Hey Little Cobra by The Rip Chords ...
fetching id for Talking About My Baby by The Impressions ...
fetching id for What's Easy For Two Is So Hard For One by Mary Wells ...
fetching id for Vaya Con Dios by The Drifters ...
fetching id for Bye Bye Barbara by Johnny Mathis ...
fetching id for He Walks Like A Man by Jody Miller ...
fetching id for Custom Machine by Bruce & Terry ...
fetching id for (The Story Of) Woman, Love And A Man (Part 1) by Tony Clarke ...
fetching id for Going Back To Louisiana by Bruce Channel ...
--> [error] Going Back To Louisiana by Bruce Channel
fetching id for For You by Rick Nelson ...
fetching id for Southtown, U.S.A. by The Dixiebelles ...
--> [error] (The Story Of) Woman, Love And A Man (Part 1) by Tony Clarke
fetching id for Out Of Limits by The Marketts ...
fetching id for Letter From Sherry by Dale Ward ...
--> [error] Southtown, U.S.A. by The Dixiebelles
fetching id for Anyone Who Had A Heart by Dionne Warwick ...
fetching id for Hooka Tooka by Chubby Checker ...
fetching id for Gonna Send You Back To Georgia (A City Slick) by Timmy Shaw ...
--> [error] Letter From Sherry by Dale Ward
fetching id for Going Going Gone by Brook Benton ...
fetching id for Come On by Tommy Roe ...
fetching id for He Says The Same Things To Me by Skeeter Davis ...
fetching id for Leaving Here by Eddie Holland ...
fetching id for All My Trials by Dick and DeeDee ...
--> [error] Gonna Send You Back To Georgia (A City Slick) by Timmy Shaw
fetching id for So Far Away by Hank Jacobs ...
fetching id for Mo-Onions by Booker T. & The MG's ...
fetching id for You Were Wrong by Z.Z. Hill ...
fetching id for It's All In The Game by Cliff Richard ...
fetching id for Surfin' Bird by The Trashmen ...
fetching id for Wow Wow Wee (He's The Boy For Me) by The Angels ...
--> [error] All My Trials by Dick and DeeDee
fetching id for That Girl Belongs To Yesterday by Gene Pitney ...
fetching id for I'll Remember (In The Still Of The Night) by Santo & Johnny ...
fetching id for 442 Glenwood Avenue by The Pixies Three ...
fetching id for Harlem Shuffle by Bob And Earl ...
fetching id for Shimmy Shimmy by The Orlons ...
fetching id for Comin' On by Bill Black's Combo ...
fetching id for Little Boxes by Pete Seeger ...
fetching id for Pink Dominos by The Crescents Featuring Chiyo ...
fetching id for Saginaw, Michigan by Lefty Frizzell ...
fetching id for Have You Ever Been Lonely (Have You Ever Been Blue) by The Caravelles ...
fetching id for Willyam, Willyam by Dee Dee Sharp ...
fetching id for Forget Him by Bobby Rydell ...
fetching id for Daisy Petal Pickin' by Jimmy Gilmer And The Fireballs ...
--> [error] Pink Dominos by The Crescents Featuring Chiyo
fetching id for There! I've Said It Again by Bobby Vinton ...
fetching id for As Usual by Brenda Lee ...
fetching id for Popsicles And Icicles by The Murmaids ...
fetching id for You'll Never Walk Alone by Patti LaBelle And The Blue Belles ...
--> [error] Daisy Petal Pickin' by Jimmy Gilmer And The Fireballs
fetching id for (It's No) Sin by The Duprees featuring Joey Vann ...
fetching id for Where Did I Go Wrong by Dee Dee Sharp ...
fetching id for How Much Can A Lonely Heart Stand by Skeeter Davis ...
fetching id for Please, Please, Please by James Brown And The Famous Flames ...
fetching id for Baby, I Love You by The Ronettes ...
fetching id for Charade by Henry Mancini And His Orchestra ...
fetching id for Drag City by Jan & Dean ...
fetching id for Somewhere by The Tymes ...
fetching id for Whispering by Nino Tempo & April Stevens ...
fetching id for The Nitty Gritty by Shirley Ellis ...
fetching id for I Can't Stop Talking About You by Steve & Eydie ...
fetching id for Comin' In The Back Door by The Baja Marimba Band ...
--> [error] You'll Never Walk Alone by Patti LaBelle And The Blue Belles
fetching id for The Little Boy by Tony Bennett ...
fetching id for Pain In My Heart by Otis Redding ...
fetching id for Tell Him by The Drew-Vels ...
fetching id for I Didn't Know What Time It Was by The Crampton Sisters ...
--> [error] Comin' In The Back Door by The Baja Marimba Band
fetching id for (I'm Watching) Every Little Move You Make by Little Peggy March ...
--> [error] I Didn't Know What Time It Was by The Crampton Sisters
fetching id for Baby What You Want Me To Do by Etta James ...
fetching id for Little Boy by The Crystals ...
fetching id for Here's A Heart by The Diplomats ...
fetching id for Strange Things Happening by Little Jr. Parker ...
--> [error] (I'm Watching) Every Little Move You Make by Little Peggy March
fetching id for True Love Goes On And On by Burl Ives ...
fetching id for Since I Fell For You by Lenny Welch ...
fetching id for When The Lovelight Starts Shining Through His Eyes by The Supremes ...
fetching id for Girls Grow Up Faster Than Boys by The Cookies ...
fetching id for Quicksand by Martha & The Vandellas ...
fetching id for My One And Only, Jimmy Boy by The Girlfriends ...
fetching id for Dumb Head by Ginny Arnell ...
--> [error] Strange Things Happening by Little Jr. Parker
fetching id for Tonight You're Gonna Fall In Love With Me by The Shirelles ...
fetching id for Watch Your Step by Brooks O'Dell ...
fetching id for If Somebody Told You by Anna King ...
fetching id for Stranger In Your Arms by Bobby Vee ...
fetching id for Ask Me by Inez Foxx ...
fetching id for Deep In The Heart Of Harlem by Clyde McPhatter ...
fetching id for Since I Found A New Love by Little Johnny Taylor ...
--> [error] Dumb Head by Ginny Arnell
fetching id for Midnight Mary by Joey Powers ...
fetching id for Dominique by The Singing Nun (Soeur Sourire) ...
--> [error] Since I Found A New Love by Little Johnny Taylor
fetching id for Wives And Lovers by Jack Jones ...
fetching id for Talk Back Trembling Lips by Johnny Tillotson ...
fetching id for That Lucky Old Sun by Ray Charles ...
--> [error] Dominique by The Singing Nun (Soeur Sourire)
fetching id for Need To Belong by Jerry Butler ...
fetching id for Can I Get A Witness by Marvin Gaye ...
fetching id for In The Summer Of His Years by Connie Francis ...
fetching id for You're No Good by Betty Everett ...
fetching id for Who Cares by Fats Domino ...
--> [error] That Lucky Old Sun by Ray Charles
fetching id for As Long As I Know He's Mine by The Marvelettes ...
fetching id for Stay With Me by Frank Sinatra ...
fetching id for Here Comes The Boy by Tracey Dey ...
fetching id for You Don't Have To Be A Baby To Cry by The Caravelles ...
fetching id for Drip Drop by Dion Di Muci ...
--> [error] Who Cares by Fats Domino
fetching id for I Gotta Dance To Keep From Crying by The Miracles ...
fetching id for Pretty Paper by Roy Orbison ...
fetching id for Tra La La La Suzy by Dean And Jean ...
fetching id for Loddy Lo by Chubby Checker ...
fetching id for The Marvelous Toy by The Chad Mitchell Trio ...
--> [error] Drip Drop by Dion Di Muci
fetching id for Today's Teardrops by Rick Nelson ...
fetching id for Snap Your Fingers by Barbara Lewis ...
fetching id for Do-Wah-Diddy by The Exciters ...
fetching id for We Belong Together by Jimmy Velvet ...
fetching id for Billie Baby by Lloyd Price ...
fetching id for His Kiss by Betty Harris ...
fetching id for Judy Loves Me by Johnny Crawford ...
--> [error] The Marvelous Toy by The Chad Mitchell Trio
fetching id for Slipin' And Slidin' by Jim and Monica ...
--> [error] Judy Loves Me by Johnny Crawford
fetching id for When You Walk In The Room by Jackie DeShannon ...
fetching id for Who's Been Sleeping In My Bed? by Linda Scott ...
fetching id for Kansas City by Trini Lopez ...
fetching id for For Your Precious Love by Garnet Mimms & The Enchanters ...
fetching id for Turn Around by Dick and DeeDee ...
--> [error] Slipin' And Slidin' by Jim and Monica
fetching id for Be True To Your School by The Beach Boys ...
fetching id for I Have A Boyfriend by The Chiffons ...
fetching id for Bon-Doo-Wah by The Orlons ...
fetching id for Long Tall Texan by Murry Kellum ...
fetching id for That Boy John by The Raindrops ...
fetching id for Please by Frank Ifield ...
--> [error] That Boy John by The Raindrops
fetching id for Come Dance With Me by Jay & The Americans ...
fetching id for Charade by Andy Williams ...
fetching id for I'm Leaving It Up To You by Dale & Grace ...
fetching id for The Boy Next Door by The Secrets ...
fetching id for Everybody by Tommy Roe ...
--> [error] Please by Frank Ifield
fetching id for In My Room by The Beach Boys ...
fetching id for Stewball by Peter, Paul & Mary ...
fetching id for Have You Heard by The Duprees featuring Joey Vann ...
fetching id for Baby Don't You Weep by Garnet Mimms & The Enchanters ...
fetching id for The Cheer Leader by Paul Petersen ...
fetching id for For Your Sweet Love by The Cascades ...
fetching id for The Feeling Is Gone by Bobby Bland ...
fetching id for Baby's Gone by Gene Thomas ...
--> [error] The Cheer Leader by Paul Petersen
fetching id for The Son Of Rebel Rouser by Duane Eddy ...
fetching id for Cold Cold Winter by The Pixies Three ...
fetching id for Coming Back To You by Maxine Brown ...
fetching id for Wonderful Summer by Robin Ward ...
fetching id for Walking The Dog by Rufus Thomas ...
fetching id for Bad Girl by Neil Sedaka ...
fetching id for She's A Fool by Lesley Gore ...
fetching id for Misery by The Dynamics ...
--> [error] Baby's Gone by Gene Thomas
fetching id for Ally Ally Oxen Free by The Kingston Trio ...
fetching id for Be Mad Little Girl by Bobby Darin ...
fetching id for I'll Search My Heart by Johnny Mathis ...
fetching id for Where Or When by The Lettermen ...
--> [error] Misery by The Dynamics
fetching id for Little Red Rooster by Sam Cooke ...
fetching id for Living A Lie by Al Martino ...
fetching id for Twenty Four Hours From Tulsa by Gene Pitney ...
fetching id for Sugar Shack by Jimmy Gilmer And The Fireballs ...
--> [error] Where Or When by The Lettermen
fetching id for It's All Right by The Impressions ...
fetching id for Rags To Riches by Sunny & The Sunliners ...
fetching id for The Impossible Happened by Little Peggy March ...
fetching id for Dawn by The David Rockingham Trio ...
--> [error] Dawn by The David Rockingham Trio
fetching id for Begging To You by Marty Robbins ...
--> [error] The Impossible Happened by Little Peggy March
fetching id for Hootenanny Saturday Night by The Brothers Four ...
fetching id for Did You Have A Happy Birthday? by Paul Anka ...
--> [error] Hootenanny Saturday Night by The Brothers Four
fetching id for Never Love A Robin by Bobby Vee ...
--> [error] Did You Have A Happy Birthday? by Paul Anka
fetching id for Washington Square by The Village Stompers ...
fetching id for Hey Little Girl by Major Lance ...
fetching id for Deep Purple by Nino Tempo & April Stevens ...
fetching id for I Wonder What She's Doing Tonight by Barry & The Tamerlanes ...
fetching id for You're Good For Me by Solomon Burke ...
fetching id for Maria Elena by Los Indios Tabajaras ...
fetching id for (Down At) Papa Joe's by The Dixiebelles ...
fetching id for Bossa Nova Baby by Elvis Presley With The Jordanaires ...
fetching id for Yesterday And You (Armen's Theme) by Bobby Vee ...
fetching id for Baby I Do Love You by The Galens ...
fetching id for Gotta Lotta Love by Steve Alaimo ...
--> [error] (Down At) Papa Joe's by The Dixiebelles
fetching id for Thank You And Goodnight by The Angels ...
fetching id for Why Do Kids Grow Up by Randy & The Rainbows ...
fetching id for 500 Miles Away From Home by Bobby Bare ...
fetching id for Walking Proud by Steve Lawrence ...
--> [error] Gotta Lotta Love by Steve Alaimo
fetching id for She's Got Everything by The Essex Featuring Anita Humes ...
fetching id for I Got A Woman by Freddie Scott ...
--> [error] Walking Proud by Steve Lawrence
fetching id for Young Wings Can Fly (Higher Than You Know) by Ruby And The Romantics ...
fetching id for Sue's Gotta Be Mine by Del Shannon ...
fetching id for Crossfire Time by Dee Clark ...
fetching id for Surfer Street by The Allisons ...
--> [error] I Got A Woman by Freddie Scott
fetching id for I Adore Him by The Angels ...
fetching id for Fools Rush In by Rick Nelson ...
fetching id for Down The Aisle (Wedding Song) by Patti LaBelle And The Blue Belles ...
--> [error] Surfer Street by The Allisons
fetching id for Saturday Night by The New Christy Minstrels ...
fetching id for The Matador by Johnny Cash ...
fetching id for Shirl Girl by Wayne Newton And The Newton Brothers ...
--> [error] Down The Aisle (Wedding Song) by Patti LaBelle And The Blue Belles
fetching id for I Am A Witness by Tommy Hunt ...
--> [error] Shirl Girl by Wayne Newton And The Newton Brothers
fetching id for Reach Out For Me by Lou Johnson ...
--> [error] I Am A Witness by Tommy Hunt
fetching id for Any Other Way by Chuck Jackson ...
fetching id for I'm Down To My Last Heartbreak by Wilson Pickett ...
fetching id for Hi Diddle Diddle by Inez Foxx ...
--> [error] Reach Out For Me by Lou Johnson
fetching id for Hey Lover by Debbie Dovale ...
--> [error] Hi Diddle Diddle by Inez Foxx
fetching id for I Can't Stay Mad At You by Skeeter Davis ...
fetching id for Mean Woman Blues by Roy Orbison ...
fetching id for Cry To Me by Betty Harris ...
fetching id for Your Other Love by Connie Francis ...
fetching id for Misty by Lloyd Price ...
fetching id for Wild! by Dee Dee Sharp ...
fetching id for Witchcraft by Elvis Presley With The Jordanaires ...
--> [error] Hey Lover by Debbie Dovale
fetching id for Unchained Melody by Vito & The Salutations ...
fetching id for Rumble by Jack Nitzsche ...
fetching id for Now! by Lena Horne ...
fetching id for Baby, What's Wrong by Lonnie Mack ...
fetching id for Baby, We've Got Love by Johnnie Taylor ...
fetching id for Busted by Ray Charles and his Orchestra ...
--> [error] Rumble by Jack Nitzsche
fetching id for That Sunday, That Summer by Nat King Cole ...
fetching id for You Lost The Sweetest Boy by Mary Wells ...
fetching id for Talk To Me by Sunny & The Sunglows ...
fetching id for Donna The Prima Donna by Dion (Di Muci) ...
fetching id for Be My Baby by The Ronettes ...
fetching id for Cross Fire! by The Orlons ...
fetching id for A Fine Fine Boy by Darlene Love ...
fetching id for Don't Wait Too Long by Tony Bennett ...
fetching id for Funny How Time Slips Away by Johnny Tillotson ...
fetching id for Gotta Travel On by Timi Yuro ...
--> [error] Donna The Prima Donna by Dion (Di Muci)
fetching id for I Could Have Danced All Night by Ben E. King ...
fetching id for Your Teenage Dreams by Johnny Mathis ...
--> [error] Gotta Travel On by Timi Yuro
fetching id for Two-Ten, Six-Eighteen (Doesn't Anybody Know My Name) by Jimmie Rodgers ...
fetching id for Cuando Calienta El Sol (When The Sun Is Hot) by Steve Allen and His Orchestra with The Copacabana Trio ...
--> [error] Your Teenage Dreams by Johnny Mathis
fetching id for Please Don't Kiss Me Again by The Charmettes ...
fetching id for Don't Think Twice, It's All Right by Peter, Paul & Mary ...
fetching id for Cry Baby by Garnet Mimms & The Enchanters ...
fetching id for The Grass Is Greener by Brenda Lee ...
fetching id for Blue Bayou by Roy Orbison ...
fetching id for Blue Velvet by Bobby Vinton ...
fetching id for New Mexican Rose by The 4 Seasons ...
fetching id for Blue Guitar by Richard Chamberlain ...
--> [error] Cuando Calienta El Sol (When The Sun Is Hot) by Steve Allen and His Orchestra with The Copacabana Trio
fetching id for Point Panic by The Surfaris ...
fetching id for Night Life by Rusty Draper ...
fetching id for Come Back by Johnny Mathis ...
fetching id for Sally, Go 'round The Roses by The Jaynetts ...
fetching id for Stop Monkeyin' Aroun' by The Dovells ...
fetching id for Honolulu Lulu by Jan & Dean ...
fetching id for Workout Stevie, Workout by Little Stevie Wonder ...
--> [error] Blue Guitar by Richard Chamberlain
fetching id for Red Sails In The Sunset by Fats Domino ...
fetching id for I'll Take You Home by The Drifters ...
fetching id for Monkey-Shine by Bill Black's Combo ...
--> [error] Workout Stevie, Workout by Little Stevie Wonder
fetching id for Two Tickets To Paradise by Brook Benton ...
fetching id for Part Time Love by Little Johnny Taylor ...
fetching id for Bust Out by The Busters ...
fetching id for Signed, Sealed, And Delivered by James Brown And The Famous Flames ...
fetching id for We Shall Overcome by Joan Baez ...
fetching id for Saltwater Taffy by Morty Jay And The Surferin' Cats ...
--> [error] Monkey-Shine by Bill Black's Combo
fetching id for When The Boy's Happy (The Girl's Happy Too) by The Four Pennies ...
fetching id for Surfer Girl by The Beach Boys ...
fetching id for Mickey's Monkey by The Miracles ...
fetching id for Heat Wave by Martha & The Vandellas ...
fetching id for Then He Kissed Me by The Crystals ...
fetching id for A Love So Fine by The Chiffons ...
fetching id for Hello Heartache, Goodbye Love by Little Peggy March ...
--> [error] Saltwater Taffy by Morty Jay And The Surferin' Cats
fetching id for My Boyfriend's Back by The Angels ...
fetching id for (Native Girl) Elephant Walk by Donald Jenkins & The Delighters ...
--> [error] Hello Heartache, Goodbye Love by Little Peggy March
fetching id for Enamorado by Keith Colley ...
fetching id for Strange Feeling by Billy Stewart ...
fetching id for I'm Crazy 'Bout My Baby by Marvin Gaye ...
fetching id for Everybody Go Home by Eydie Gorme ...
--> [error] (Native Girl) Elephant Walk by Donald Jenkins & The Delighters
fetching id for Dear Abby by The Hearts ...
fetching id for (Theme From) Any Number Can Win by Jimmy Smith ...
fetching id for 31 Flavors by The Shirelles ...
fetching id for Little Deuce Coupe by The Beach Boys ...
fetching id for Wonderful! Wonderful! by The Tymes ...
fetching id for Only In America by Jay & The Americans ...
fetching id for If I Had A Hammer by Trini Lopez ...
fetching id for A Walkin' Miracle by The Essex Featuring Anita Humes ...
fetching id for Martian Hop by The Ran-Dells ...
fetching id for September Song by Jimmy Durante ...
fetching id for Speed Ball by Ray Stevens ...
fetching id for First Day Back At School by Paul and Paula ...
fetching id for Two Sides (To Every Story) by Etta James ...
fetching id for He's Mine (I Love Him, I Love Him, I Love Him) by Alice Wonder Land ...
--> [error] Everybody Go Home by Eydie Gorme
fetching id for Sweet Impossible You by Brenda Lee ...
--> [error] Sweet Impossible You by Brenda Lee
fetching id for Little Eeefin Annie by Joe Perkins ...
--> [error] He's Mine (I Love Him, I Love Him, I Love Him) by Alice Wonder Land
fetching id for It's A Mad, Mad, Mad, Mad World by The Shirelles ...
fetching id for The Scavenger by Dick Dale and The Del-Tones ...
fetching id for Jenny Brown by The Smothers Brothers ...
fetching id for The Monkey Time by Major Lance ...
fetching id for The Kind Of Boy You Can't Forget by The Raindrops ...
fetching id for Mockingbird by Inez Foxx with Charlie Foxx ...
fetching id for Treat My Baby Good by Bobby Darin ...
fetching id for More by Vic Dana ...
fetching id for Wham! by Lonnie Mack ...
fetching id for I'm Confessin' (That I Love You) by Frank Ifield ...
--> [error] Little Eeefin Annie by Joe Perkins
fetching id for Baby Get It (And Don't Quit It) by Jackie Wilson ...
--> [error] I'm Confessin' (That I Love You) by Frank Ifield
fetching id for My Babe by The Righteous Brothers ...
fetching id for Cindy's Gonna Cry by Johnny Crawford ...
--> [error] Baby Get It (And Don't Quit It) by Jackie Wilson
fetching id for That's The Only Way by The 4 Seasons ...
fetching id for Painted, Tainted Rose by Al Martino ...
fetching id for You Can Never Stop Me Loving You by Johnny Tillotson ...
fetching id for Hey, Girl by Freddie Scott ...
fetching id for Birthday Party by The Pixies Three ...
fetching id for Betty In Bermudas by The Dovells ...
fetching id for What Does A Girl Do? by The Shirelles ...
fetching id for More by Kai Winding & Orchestra ...
fetching id for Toys In The Attic by Joe Sherman, his Orchestra and Chorus ...
--> [error] Cindy's Gonna Cry by Johnny Crawford
fetching id for Teenage Cleopatra by Tracey Dey ...
--> [error] Toys In The Attic by Joe Sherman, his Orchestra and Chorus
fetching id for Better To Give Than Receive by Joe Hinton ...
fetching id for Detroit City No. 2 by Ben Colder ...
--> [error] Teenage Cleopatra by Tracey Dey
fetching id for Cowboy Boots by Dave Dudley ...
fetching id for Lonely Drifter by The O'Jays ...
--> [error] Detroit City No. 2 by Ben Colder
fetching id for Hey Lonely One by Baby Washington ...
--> [error] Lonely Drifter by The O'Jays
fetching id for Hello Mudduh, Hello Fadduh! (A Letter From Camp) by Allan Sherman ...
fetching id for Hey There Lonely Boy by Ruby And The Romantics ...
fetching id for Frankie And Johnny by Sam Cooke ...
fetching id for Blowin' In The Wind by Peter, Paul & Mary ...
fetching id for Why Don't You Believe Me by The Duprees featuring Joey Vann ...
fetching id for Denise by Randy & The Rainbows ...
fetching id for Surfer Joe by The Surfaris ...
fetching id for This Is My Prayer by Theola Kilgore ...
fetching id for Toys In The Attic by Jack Jones ...
fetching id for Where Did The Good Times Go by Dick and DeeDee ...
--> [error] Hey Lonely One by Baby Washington
fetching id for That's How It Goes by George Maharis ...
fetching id for Let's Make Love Tonight by Bobby Rydell ...
fetching id for Michael - Pt. 1 by Steve Alaimo ...
fetching id for Please Don't Talk To The Lifeguard by Diane Ray ...
--> [error] Michael - Pt. 1 by Steve Alaimo
fetching id for Candy Girl by The 4 Seasons ...
fetching id for I Want To Stay Here by Steve & Eydie ...
fetching id for Make The World Go Away by Timi Yuro ...
fetching id for The Lonely Surfer by Jack Nitzsche ...
fetching id for Danke Schoen by Wayne Newton And The Newton Brothers ...
--> [error] Please Don't Talk To The Lifeguard by Diane Ray
fetching id for Straighten Up Your Heart by Barbara Lewis ...
fetching id for 8 X 10 by Bill Anderson ...
fetching id for Fingertips - Pt 2 by Little Stevie Wonder ...
--> [error] Danke Schoen by Wayne Newton And The Newton Brothers
fetching id for China Nights (Shina No Yoru) by Kyu Sakamoto ...
--> [error] Fingertips - Pt 2 by Little Stevie Wonder
fetching id for It's Too Late by Wilson Pickett ...
fetching id for (I Cried at) Laura's Wedding by Barbara Lynn ...
fetching id for Tell Me The Truth by Nancy Wilson ...
fetching id for Mr. Wishing Well by Nat King Cole ...
fetching id for Nick Teen And Al K. Hall by Rolf Harris ...
fetching id for Desert Pete by The Kingston Trio ...
fetching id for Que Sera, Sera (Whatever Will Be, Will Be) by The High Keyes ...
--> [error] China Nights (Shina No Yoru) by Kyu Sakamoto
fetching id for Wait Til' My Bobby Gets Home by Darlene Love ...
fetching id for Drownin' My Sorrows by Connie Francis ...
--> [error] Que Sera, Sera (Whatever Will Be, Will Be) by The High Keyes
fetching id for Lucky Lips by Cliff Richard ...
fetching id for Abilene by George Hamilton IV ...
fetching id for Leave Me Alone by Baby Washington ...
fetching id for It's A Lonely Town (Lonely Without You) by Gene McDaniels ...
fetching id for Sooner Or Later by Johnny Mathis ...
fetching id for Chinese Checkers by Booker T. & The MG's ...
fetching id for Man's Temptation by Gene Chandler ...
fetching id for Something Old, Something New by Paul and Paula ...
fetching id for Hear The Bells by The Tokens ...
fetching id for Faded Love by Patsy Cline ...
--> [error] Drownin' My Sorrows by Connie Francis
fetching id for Your Boyfriend's Back by Bobby Comstock And The Counts ...
fetching id for Judy's Turn To Cry by Lesley Gore ...
fetching id for Green, Green by The New Christy Minstrels ...
fetching id for True Love Never Runs Smooth by Gene Pitney ...
fetching id for I (Who Have Nothing) by Ben E. King ...
fetching id for It Hurts To Be Sixteen by Andrea Carroll ...
--> [error] It Hurts To Be Sixteen by Andrea Carroll
fetching id for Everybody Monkey by Freddy Cannon ...
fetching id for Pay Back by Etta James ...
fetching id for Your Baby's Gone Surfin' by Duane Eddy ...
fetching id for So Much In Love by The Tymes ...
fetching id for Twist It Up by Chubby Checker ...
--> [error] Your Boyfriend's Back by Bobby Comstock And The Counts
fetching id for (You're the) Devil In Disguise by Elvis Presley With The Jordanaires ...
fetching id for Surf City by Jan & Dean ...
fetching id for Groovy Baby by Billy Abbott And The Jewels ...
fetching id for Just One Look by Doris Troy ...
fetching id for When A Boy Falls In Love by Mel Carter ...
fetching id for The Dreamer by Neil Sedaka ...
fetching id for My Daddy Knows Best by The Marvelettes ...
fetching id for A Breath Taking Guy by The Supremes ...
fetching id for Organ Shout by Dave "Baby" Cortez ...
fetching id for Gone by The Rip Chords ...
fetching id for Do The Monkey by King Curtis ...
fetching id for Love Me All The Way by Kim Weston ...
fetching id for My Whole World Is Falling Down by Brenda Lee ...
fetching id for Easier Said Than Done by The Essex ...
fetching id for Marlena by The 4 Seasons ...
fetching id for Detroit City by Bobby Bare ...
fetching id for Memphis by Lonnie Mack ...
fetching id for Hopeless by Andy Williams ...
fetching id for Surfin' Hootenanny by Al Casey ...
fetching id for Mama Don't Allow by The Rooftop Singers ...
fetching id for Daughter by The Blenders ...
fetching id for I Wonder by Brenda Lee ...
fetching id for This Is All I Ask by Burl Ives ...
fetching id for This Is All I Ask by Tony Bennett ...
fetching id for Shake! Shake! Shake! by Jackie Wilson ...
fetching id for These Foolish Things by James Brown And The Famous Flames ...
--> [error] Organ Shout by Dave "Baby" Cortez
fetching id for Make The Music Play by Dionne Warwick ...
fetching id for Make The World Go Away by Ray Price ...
fetching id for Ring Of Fire by Johnny Cash ...
fetching id for Sometimes You Gotta Cry A Little by Bobby Bland ...
fetching id for Can't Nobody Love You by Solomon Burke ...
fetching id for Dum Dum Dee Dum by Johnny Cymbal ...
fetching id for It Won't Be This Way (Always) by The King Pins ...
--> [error] These Foolish Things by James Brown And The Famous Flames
fetching id for I'm Not A Fool Anymore by T.K. Hulin ...
fetching id for Dance, Everybody, Dance by The Dartells ...
--> [error] It Won't Be This Way (Always) by The King Pins
fetching id for Tie Me Kangaroo Down, Sport by Rolf Harris ...
fetching id for Till Then by The Classics ...
fetching id for Pride And Joy by Marvin Gaye ...
fetching id for Not Me by The Orlons ...
fetching id for Six Days On The Road by Dave Dudley ...
fetching id for Tips Of My Fingers by Roy Clark ...
fetching id for Be Careful Of Stones That You Throw by Dion ...
fetching id for How Many Teardrops by Lou Christie ...
fetching id for I'm Afraid To Go Home by Brian Hyland ...
fetching id for Surf Party by Chubby Checker ...
fetching id for I Will Love You by Richard Chamberlain ...
fetching id for Land Of 1000 Dances by Chris Kenner ...
fetching id for I Cried by Tammy Montgomery ...
--> [error] Dance, Everybody, Dance by The Dartells
fetching id for Sukiyaki by Kyu Sakamoto ...
fetching id for My True Confession by Brook Benton ...
fetching id for Goodnight My Love by The Fleetwoods ...
fetching id for Blue On Blue by Bobby Vinton ...
fetching id for Rock Me In The Cradle Of Love by Dee Dee Sharp ...
fetching id for Hootenanny by The Glencoves ...
fetching id for No One by Ray Charles ...
fetching id for Don't Say Goodnight And Mean Goodbye by The Shirelles ...
fetching id for Brenda by The Cupids ...
fetching id for Harry The Hairy Ape by Ray Stevens ...
fetching id for Dancin' Holiday by The Olympics ...
fetching id for Saturday Sunshine by Burt Bacharach ...
fetching id for The Minute You're Gone by Sonny James ...
fetching id for Still No. 2 by Ben Colder ...
--> [error] I Cried by Tammy Montgomery
fetching id for One Fine Day by The Chiffons ...
fetching id for Without Love (There Is Nothing) by Ray Charles ...
--> [error] Still No. 2 by Ben Colder
fetching id for Hello Stranger by Barbara Lewis ...
fetching id for Be True To Yourself by Bobby Vee ...
fetching id for It's My Party by Lesley Gore ...
fetching id for Shake A Tail Feather by The Five Du-Tones ...
fetching id for Cottonfields by Ace Cannon ...
fetching id for Will Power by The Cookies ...
--> [error] Without Love (There Is Nothing) by Ray Charles
fetching id for Jack The Ripper by Link Wray And The Wraymen ...
fetching id for Dance, Dance, Dance by Joey Dee ...
fetching id for At The Shore by Johnny Caswell ...
fetching id for True Blue Lou by Tony Bennett ...
fetching id for On Top Of Spaghetti by Tom Glazer And The Do-Re-Mi Children's Chorus ...
--> [error] Will Power by The Cookies
fetching id for My Summer Love by Ruby And The Romantics ...
fetching id for Swinging On A Star by Big Dee Irwin (with Little Eva) ...
--> [error] On Top Of Spaghetti by Tom Glazer And The Do-Re-Mi Children's Chorus
fetching id for (I Love You) Don't You Forget It by Perry Como ...
--> [error] Swinging On A Star by Big Dee Irwin (with Little Eva)
fetching id for Falling by Roy Orbison ...
fetching id for Those Lazy-Hazy-Crazy Days Of Summer by Nat King Cole ...
fetching id for You Can't Sit Down by The Dovells ...
fetching id for Summer's Comin' by Kirby St. Romain ...
--> [error] (I Love You) Don't You Forget It by Perry Como
fetching id for Like The Big Guys Do by The Rocky Fellers ...
fetching id for Antony And Cleopatra Theme by Ferrante & Teicher ...
--> [error] Summer's Comin' by Kirby St. Romain
fetching id for I Can't Stop Loving You by Count Basie ...
fetching id for What A Fool I've Been by Carla Thomas ...
fetching id for Baja by The Astronauts ...
fetching id for True Love by Richard Chamberlain ...
fetching id for Still by Bill Anderson ...
fetching id for Shut Down by The Beach Boys ...
fetching id for String Along by Rick Nelson ...
fetching id for First Quarrel by Paul and Paula ...
fetching id for Come Go With Me by Dion ...
fetching id for Da Doo Ron Ron (When He Walked Me Home) by The Crystals ...
fetching id for Come And Get These Memories by Martha & The Vandellas ...
fetching id for I Love You Because by Al Martino ...
fetching id for My Block by The Four Pennies ...
--> [error] Antony And Cleopatra Theme by Ferrante & Teicher
fetching id for From Me To You by Del Shannon ...
fetching id for Rat Race by The Drifters ...
fetching id for A Letter From Betty by Bobby Vee ...
fetching id for Summertime by Chris Columbo Quintet ...
fetching id for I Wish I Were A Princess by Little Peggy March ...
--> [error] I Wish I Were A Princess by Little Peggy March
fetching id for Birdland by Chubby Checker ...
fetching id for The Good Life by Tony Bennett ...
fetching id for Your Old Stand By by Mary Wells ...
fetching id for 18 Yellow Roses by Bobby Darin ...
fetching id for Poor Little Rich Girl by Steve Lawrence ...
--> [error] Summertime by Chris Columbo Quintet
fetching id for Not Too Young To Get Married by Bob B. Soxx And The Blue Jeans ...
fetching id for Don't Try To Fight It, Baby by Eydie Gorme ...
--> [error] Poor Little Rich Girl by Steve Lawrence
fetching id for Gypsy Woman by Rick Nelson ...
fetching id for If My Pillow Could Talk by Connie Francis ...
fetching id for Tears Of Joy by Chuck Jackson ...
--> [error] Don't Try To Fight It, Baby by Eydie Gorme
fetching id for Spring by Birdlegs & Pauline And Their Versatility Birds ...
fetching id for Say Wonderful Things by Patti Page ...
fetching id for Guilty by Jim Reeves ...
fetching id for The Ten Commandments Of Love by James MacArthur ...
--> [error] Tears Of Joy by Chuck Jackson
fetching id for Wildwood Days by Bobby Rydell ...
fetching id for Every Step Of The Way by Johnny Mathis ...
fetching id for Two Faces Have I by Lou Christie ...
fetching id for Shake A Hand by Jackie Wilson & Linda Hopkins ...
--> [error] The Ten Commandments Of Love by James MacArthur
fetching id for Old Smokey Locomotion by Little Eva ...
--> [error] Shake A Hand by Jackie Wilson & Linda Hopkins
fetching id for The Love Of My Man by Theola Kilgore ...
fetching id for Give Us Your Blessing by Ray Peterson ...
fetching id for Get Him by The Exciters ...
fetching id for Scarlett O'Hara by Lawrence Welk And His Orchestra ...
--> [error] Old Smokey Locomotion by Little Eva
fetching id for If You Wanna Be Happy by Jimmy Soul ...
fetching id for Prisoner Of Love by James Brown And The Famous Flames ...
fetching id for Another Saturday Night by Sam Cooke ...
fetching id for Losing You by Brenda Lee ...
fetching id for The Bounce by The Olympics ...
fetching id for There Goes (My Heart Again) by Fats Domino ...
--> [error] Scarlett O'Hara by Lawrence Welk And His Orchestra
fetching id for I'm Movin' On by Matt Lucas ...
--> [error] There Goes (My Heart Again) by Fats Domino
fetching id for Sting Ray by The Routers ...
fetching id for If You Need Me by Solomon Burke ...
fetching id for Graduation Day by Bobby Pickett ...
--> [error] I'm Movin' On by Matt Lucas
fetching id for Hello Jim by Paul Anka ...
--> [error] Graduation Day by Bobby Pickett
fetching id for Say Wonderful Things by Ronnie Carroll ...
fetching id for El Watusi by Ray Barretto ...
fetching id for Hot Pastrami by The Dartells ...
--> [error] Hello Jim by Paul Anka
fetching id for Pushover by Etta James ...
fetching id for Foolish Little Girl by The Shirelles ...
fetching id for I'm Saving My Love by Skeeter Davis ...
fetching id for Sweet Dreams (Of You) by Patsy Cline ...
fetching id for Take These Chains From My Heart by Ray Charles ...
fetching id for Let's Go Steady Again by Neil Sedaka ...
fetching id for I Will Follow Him by Little Peggy March ...
fetching id for Little Latin Lupe Lu by The Righteous Brothers ...
fetching id for Don't Make My Baby Blue by Frankie Laine ...
--> [error] Hot Pastrami by The Dartells
fetching id for Soon (I'll Be Home Again) by The 4 Seasons Featuring Frankie Valli ...
fetching id for Yeh-Yeh! by Mongo Santamaria Orch. ...
--> [error] Don't Make My Baby Blue by Frankie Laine
fetching id for Banzai Pipeline by Henry Mancini And His Orchestra ...
--> [error] Yeh-Yeh! by Mongo Santamaria Orch.
fetching id for Breakwater by Lawrence Welk And His Orchestra ...
--> [error] Banzai Pipeline by Henry Mancini And His Orchestra
fetching id for Pipeline by Chantay's ...
fetching id for Killer Joe by The Rocky Fellers ...
fetching id for What A Guy by The Raindrops ...
fetching id for Reverend Mr. Black by The Kingston Trio ...
fetching id for Puff (The Magic Dragon) by Peter, Paul & Mary ...
fetching id for Shame, Shame, Shame by Jimmy Reed ...
fetching id for Teenage Heaven by Johnny Cymbal ...
fetching id for Ain't That A Shame! by The 4 Seasons ...
fetching id for Patty Baby by Freddy Cannon ...
fetching id for Do It - Rat Now by Bill Black's Combo ...
fetching id for Hobo Flats - Part I by Jimmy Smith ...
--> [error] Breakwater by Lawrence Welk And His Orchestra
fetching id for Lonely Boy, Lonely Guitar by Duane Eddy ...
fetching id for A Stranger In Your Town by The Shacklefords ...
--> [error] Hobo Flats - Part I by Jimmy Smith
fetching id for Sad, Sad Girl And Boy by The Impressions ...
fetching id for You Know It Ain't Right by Joe Hinton ...
fetching id for Spring In Manhattan by Tony Bennett ...
fetching id for Black Cloud by Chubby Checker ...
fetching id for Can't Get Used To Losing You by Andy Williams ...
fetching id for This Little Girl by Dion ...
fetching id for Hot Pastrami With Mashed Potatoes - Part I by Joey Dee & the Starliters ...
--> [error] A Stranger In Your Town by The Shacklefords
fetching id for Two Kind Of Teardrops by Del Shannon ...
fetching id for Little Band Of Gold by James Gilreath ...
fetching id for That's How Heartaches Are Made by Baby Washington ...
fetching id for If You Need Me by Wilson Pickett ...
fetching id for Gravy Waltz by Steve Allen ...
--> [error] Hot Pastrami With Mashed Potatoes - Part I by Joey Dee & the Starliters
fetching id for Needles And Pins by Jackie DeShannon ...
fetching id for The Last Leaf by The Cascades ...
fetching id for Soulville by Dinah Washington ...
fetching id for Forever by The Marvelettes ...
fetching id for These Arms Of Mine by Otis Redding ...
fetching id for Days Of Wine And Roses by Andy Williams ...
fetching id for He's So Fine by The Chiffons ...
fetching id for Charms by Bobby Vee ...
fetching id for Mecca by Gene Pitney ...
fetching id for Remember Diana by Paul Anka ...
fetching id for The Dog by Rufus Thomas ...
fetching id for Got You On My Mind by Cookie And His Cupcakes ...
--> [error] Gravy Waltz by Steve Allen
fetching id for Danger by Vic Dana ...
fetching id for River's Invitation by Percy Mayfield ...
fetching id for On Broadway by The Drifters ...
fetching id for Watermelon Man by Mongo Santamaria Band ...
--> [error] Got You On My Mind by Cookie And His Cupcakes
fetching id for A Love She Can Count On by The Miracles ...
fetching id for Don't Say Nothin' Bad (About My Baby) by The Cookies ...
--> [error] Watermelon Man by Mongo Santamaria Band
fetching id for Tom Cat by The Rooftop Singers ...
fetching id for (Today I Met) The Boy I'm Gonna Marry by Darlene Love ...
fetching id for Days Of Wine And Roses by Henry Mancini And His Orchestra ...
fetching id for Young And In Love by Dick and DeeDee ...
fetching id for Baby Workout by Jackie Wilson ...
fetching id for The Bird's The Word by The Rivingtons ...
fetching id for Call Me Irresponsible by Jack Jones ...
fetching id for Call Me Irresponsible by Frank Sinatra ...
fetching id for Ronnie, Call Me When You Get A Chance by Shelley Fabares ...
fetching id for I Know I Know by "Pookie" Hudson ...
fetching id for The Last Minute (Pt. I) by Jimmy McGriff ...
fetching id for Linda by Jan & Dean ...
fetching id for Young Lovers by Paul and Paula ...
fetching id for The End Of The World by Skeeter Davis ...
fetching id for Heart by Kenny Chandler ...
fetching id for Locking Up My Heart by The Marvelettes ...
fetching id for Here I Stand by The Rip Chords ...
fetching id for Bony Moronie by The Appalachians ...
--> [error] I Know I Know by "Pookie" Hudson
fetching id for Rockin' Crickets by Rockin' Rebels ...
fetching id for Shy Girl by The Cascades ...
fetching id for Heart! (I Hear You Beating) by Wayne Newton And The Newton Brothers ...
--> [error] Bony Moronie by The Appalachians
fetching id for The Folk Singer by Tommy Roe ...
fetching id for He's A Bad Boy by Carole King ...
fetching id for Old Enough To Love by Ricky Nelson ...
fetching id for South Street by The Orlons ...
fetching id for Sandy by Dion ...
fetching id for Over The Mountain (Across The Sea) by Bobby Vinton ...
fetching id for Do The Bird by Dee Dee Sharp ...
fetching id for Twenty Miles by Chubby Checker ...
fetching id for Rainbow by Gene Chandler ...
fetching id for Mr. Bass Man by Johnny Cymbal ...
fetching id for Memory Lane by The Hippies (Formerly The Tams) ...
--> [error] Heart! (I Hear You Beating) by Wayne Newton And The Newton Brothers
fetching id for How Can I Forget by Jimmy Holiday ...
fetching id for Don't Let Her Be Your Baby by The Contours ...
--> [error] Memory Lane by The Hippies (Formerly The Tams)
fetching id for Mother, Please! by Jo Ann Campbell ...
fetching id for One Boy Too Late by Mike Clifford ...
fetching id for Don't Be Afraid, Little Darlin' by Steve Lawrence ...
--> [error] One Boy Too Late by Mike Clifford
fetching id for Our Day Will Come by Ruby And The Romantics ...
fetching id for I Got What I Wanted by Brook Benton ...
fetching id for Out Of My Mind by Johnny Tillotson ...
fetching id for Follow The Boys by Connie Francis ...
fetching id for In Dreams by Roy Orbison ...
fetching id for What Are Boys Made Of by The Percells ...
fetching id for Our Winter Love by Bill Pursell ...
fetching id for Whatever You Want by Jerry Butler ...
fetching id for You Never Miss Your Water (Till The Well Runs Dry) by Little Esther Phillips & Big Al Downing ...
--> [error] Don't Be Afraid, Little Darlin' by Steve Lawrence
fetching id for Ask Me by Maxine Brown ...
fetching id for Ann-Marie by The Belmonts ...
fetching id for Theme From Lawrence Of Arabia by Ferrante & Teicher ...
--> [error] You Never Miss Your Water (Till The Well Runs Dry) by Little Esther Phillips & Big Al Downing
fetching id for Rhythm Of The Rain by The Cascades ...
fetching id for Blame It On The Bossa Nova by Eydie Gorme ...
fetching id for I Wanna Be Around by Tony Bennett ...
fetching id for All Over The World by Nat King Cole ...
fetching id for All I Have To Do Is Dream by Richard Chamberlain ...
fetching id for Dearer Than Life by Brook Benton ...
fetching id for Sun Arise by Rolf Harris ...
fetching id for He's So Heavenly by Brenda Lee ...
--> [error] Theme From Lawrence Of Arabia by Ferrante & Teicher
fetching id for Hot Cakes! 1st Serving by Dave "Baby" Cortez ...
--> [error] He's So Heavenly by Brenda Lee
fetching id for Preacherman by Charlie Russo ...
--> [error] Hot Cakes! 1st Serving by Dave "Baby" Cortez
fetching id for You're The Reason I'm Living by Bobby Darin ...
fetching id for Yakety Sax by Boots Randolph and his Combo ...
--> [error] Preacherman by Charlie Russo
fetching id for Wild Weekend by The Rebels ...
fetching id for Let's Limbo Some More by Chubby Checker ...
fetching id for Laughing Boy by Mary Wells ...
fetching id for Walk Like A Man by The 4 Seasons ...
fetching id for I Got A Woman by Rick Nelson ...
fetching id for I'm Just A Country Boy by George McCurn ...
fetching id for You Don't Love Me Anymore (And I Can Tell) by Rick Nelson ...
fetching id for He's Got The Power by The Exciters ...
fetching id for Amy by Paul Petersen ...
--> [error] I'm Just A Country Boy by George McCurn
fetching id for Sax Fifth Avenue by Johnny Beecher and his Buckingham Road Quintet ...
--> [error] Amy by Paul Petersen
fetching id for Bill Bailey, Won't You Please Come Home by Ella Fitzgerald ...
fetching id for This Empty Place by Dionne Warwick ...
fetching id for How Can I Forget by Ben E. King ...
fetching id for Eternally by The Chantels ...
fetching id for Diane by Joe Harnell And His Orchestra ...
fetching id for If You Can't Rock Me by Ricky Nelson ...
fetching id for What Will My Mary Say by Johnny Mathis ...
fetching id for Why Do Lovers Break Each Other's Heart? by Bob B. Soxx And The Blue Jeans ...
fetching id for One Broken Heart For Sale by Elvis Presley With The Mello Men ...
--> [error] Diane by Joe Harnell And His Orchestra
fetching id for Ruby Baby by Dion ...
fetching id for Love For Sale by Arthur Lyman Group ...
fetching id for Don't Set Me Free by Ray Charles and his Orchestra ...
fetching id for Back At The Chicken Shack, Part 1 by Jimmy Smith ...
--> [error] Back At The Chicken Shack, Part 1 by Jimmy Smith
fetching id for Don't Wanna Think About Paula by Dickey Lee ...
--> [error] Don't Set Me Free by Ray Charles and his Orchestra
fetching id for Meditation (Meditacao) by Charlie Byrd ...
fetching id for Funny Man by Ray Stevens ...
fetching id for Don't Mention My Name by The Shepherd Sisters ...
fetching id for Not For All The Money In The World by The Shirelles ...
fetching id for Hey Paula by Paul and Paula ...
fetching id for Let's Turkey Trot by Little Eva ...
fetching id for Greenback Dollar by The Kingston Trio ...
fetching id for Mama Didn't Lie by Jan Bradley ...
fetching id for Boss Guitar by Duane Eddy and the Rebelettes ...
--> [error] Don't Mention My Name by The Shepherd Sisters
fetching id for Alice In Wonderland by Neil Sedaka ...
fetching id for Tell Him I'm Not Home by Chuck Jackson ...
fetching id for Butterfly Baby by Bobby Rydell ...
fetching id for Cast Your Fate To The Wind by Vince Guaraldi Trio ...
fetching id for That's All by Rick Nelson ...
fetching id for I'll Make It Alright by The Valentinos (The Lovers) ...
fetching id for Walk Right In by The Rooftop Singers ...
fetching id for The Gypsy Cried by Lou Christie ...
fetching id for Hitch Hike by Marvin Gaye ...
fetching id for The Jive Samba by Cannonball Adderley ...
fetching id for I'm In Love Again by Rick Nelson ...
fetching id for Insult To Injury by Timi Yuro ...
fetching id for Gone With The Wind by The Duprees featuring Joey Vann ...
fetching id for Marching Thru Madrid by Herb Alpert's Tijuana Brass ...
fetching id for Send Me Some Lovin' by Sam Cooke ...
fetching id for From A Jack To A King by Ned Miller ...
fetching id for You've Really Got A Hold On Me by The Miracles ...
fetching id for Fly Me To The Moon - Bossa Nova by Joe Harnell And His Orchestra ...
--> [error] I'll Make It Alright by The Valentinos (The Lovers)
fetching id for Call On Me by Bobby Bland ...
fetching id for Little Town Flirt by Del Shannon ...
fetching id for That's The Way Love Is by Bobby Bland ...
fetching id for As Long As She Needs Me by Sammy Davis Jr. ...
fetching id for Let's Stomp by Bobby Comstock ...
--> [error] Fly Me To The Moon - Bossa Nova by Joe Harnell And His Orchestra
fetching id for Pepino's Friend Pasqual (The Italian Pussy-Cat) by Lou Monte ...
fetching id for Nothing Goes Up (Without Coming Down) by Nat King Cole ...
--> [error] Let's Stomp by Bobby Comstock
fetching id for Cigarettes And Coffee Blues by Marty Robbins ...
fetching id for Little Star by Bobby Callender ...
--> [error] Nothing Goes Up (Without Coming Down) by Nat King Cole
fetching id for Don't Let Me Cross Over by Carl Butler & Pearl ...
fetching id for Two Wrongs Don't Make A Right by Mary Wells ...
fetching id for He's Sure The Boy I Love by The Crystals ...
fetching id for Up On The Roof by The Drifters ...
fetching id for They Remind Me Too Much Of You by Elvis Presley With The Mello Men ...
--> [error] Little Star by Bobby Callender
fetching id for Big Wide World by Teddy Randazzo ...
fetching id for Ridin' The Wind by The Tornadoes ...
fetching id for Bossa Nova U.S.A. by The Dave Brubeck Quartet ...
fetching id for I'm The One Who Loves You by The Impressions ...
fetching id for All About My Girl by Jimmy McGriff ...
fetching id for Baby, Baby, Baby by Sam Cooke ...
fetching id for Pin A Medal On Joey by James Darren ...
fetching id for Hi-Lili, Hi-Lo by Richard Chamberlain ...
fetching id for Who Stole The Keeshka? by The Matys Bros. ...
--> [error] Ridin' The Wind by The Tornadoes
fetching id for The Brightest Smile In Town by Ray Charles and his Orchestra ...
--> [error] Who Stole The Keeshka? by The Matys Bros.
fetching id for Don't Be Cruel by Barbara Lynn ...
fetching id for My Foolish Heart by The Demensions ...
fetching id for Your Used To Be by Brenda Lee ...
fetching id for The Night Has A Thousand Eyes by Bobby Vee ...
fetching id for Every Day I Have To Cry by Steve Alaimo ...
fetching id for Love (Makes the World Go 'round) by Paul Anka ...
fetching id for Java by Floyd Cramer ...
fetching id for I Really Don't Want To Know by Little Esther Phillips ...
--> [error] The Brightest Smile In Town by Ray Charles and his Orchestra
fetching id for What Does A Girl Do? by Marcie Blane ...
fetching id for If Mary's There by Brian Hyland ...
fetching id for Meditation (Meditacao) by Pat Boone ...
--> [error] I Really Don't Want To Know by Little Esther Phillips
fetching id for The 2,000 Pound Bee (Part 2) by The Ventures ...
--> [error] Meditation (Meditacao) by Pat Boone
fetching id for Pretty Boy Lonely by Patti Page ...
fetching id for I Will Live My Life For You by Tony Bennett ...
fetching id for Boss by The Rumblers ...
fetching id for Loop De Loop by Johnny Thunder ...
fetching id for Go Away Little Girl by Steve Lawrence ...
fetching id for It's Up To You by Rick Nelson ...
fetching id for Half Heaven - Half Heartache by Gene Pitney ...
fetching id for My Coloring Book by Sandy Stewart ...
--> [error] The 2,000 Pound Bee (Part 2) by The Ventures
fetching id for The Cinnamon Cinder (It's A Very Nice Dance) by The Pastel Six ...
fetching id for My Dad by Paul Petersen ...
fetching id for She'll Never Know by Brenda Lee ...
--> [error] My Coloring Book by Sandy Stewart
fetching id for I'm A Woman by Peggy Lee ...
fetching id for Strange I Know by The Marvelettes ...
fetching id for Chicken Feed by Bent Fabric and His Piano ...
--> [error] She'll Never Know by Brenda Lee
fetching id for Don't Fence Me In by George Maharis ...
fetching id for Faded Love by Jackie DeShannon ...
fetching id for I Saw Linda Yesterday by Dickey Lee ...
fetching id for Puddin N' Tain (Ask Me Again, I'll Tell You The Same) by The Alley Cats ...
fetching id for Tell Him by The Exciters ...
fetching id for My Coloring Book by Kitty Kallen ...
--> [error] Don't Fence Me In by George Maharis
fetching id for Settle Down (Goin' Down That Highway) by Peter, Paul & Mary ...
fetching id for I'm Gonna' Be Warm This Winter by Connie Francis ...
fetching id for Proud by Johnny Crawford ...
fetching id for Don't Make Me Over by Dionne Warwick ...
fetching id for Two Lovers by Mary Wells ...
fetching id for Would It Make Any Difference To You by Etta James ...
fetching id for Ain't Gonna Kiss Ya by The Ribbons ...
fetching id for Leavin' On Your Mind by Patsy Cline ...
fetching id for From The Bottom Of My Heart (Dammi, Dammi, Dammi) by Dean Martin ...
fetching id for Only You (And You Alone) by Mr. Acker Bilk ...
--> [error] My Coloring Book by Kitty Kallen
fetching id for Shake Sherry by The Contours ...
fetching id for Telstar by The Tornadoes ...
fetching id for Everybody Loves A Lover by The Shirelles ...
fetching id for The Ballad Of Jed Clampett by Flatt & Scruggs ...
fetching id for Shake Me I Rattle (Squeeze Me I Cry) by Marion Worth ...
fetching id for How Much Is That Doggie In The Window by Baby Jane & The Rockabyes ...
--> [error] Only You (And You Alone) by Mr. Acker Bilk
fetching id for The Popeye Waddle by Don Covay ...
fetching id for What To Do With Laurie by Mike Clifford ...
--> [error] How Much Is That Doggie In The Window by Baby Jane & The Rockabyes
fetching id for Let Me Go The Right Way by The Supremes ...
fetching id for I'd Rather Be Here In Your Arms by The Duprees ...
fetching id for Zing! Went The Strings Of My Heart by The Furys ...
--> [error] What To Do With Laurie by Mike Clifford
fetching id for Al Di La by Connie Francis ...
fetching id for M.G. Blues by Jimmy McGriff ...
fetching id for Baby, You're Driving Me Crazy by Joey Dee ...
fetching id for Hotel Happiness by Brook Benton ...
fetching id for Pepino The Italian Mouse by Lou Monte ...
fetching id for Remember Then by The Earls ...
fetching id for Limbo Rock by Chubby Checker ...
fetching id for Zip-A-Dee Doo-Dah by Bob B. Soxx And The Blue Jeans ...
fetching id for See See Rider by LaVern Baker ...
fetching id for Willie Can by Sue Thompson ...
fetching id for Walk Right In by The Moments ...
fetching id for I Need You by Rick Nelson ...
fetching id for Remember Baby by Shep And The Limelites ...
fetching id for Every Beat Of My Heart by James Brown And The Famous Flames ...
fetching id for Shutters And Boards by Jerry Wallace ...
fetching id for Bobby's Girl by Marcie Blane ...
fetching id for Big Girls Don't Cry by The 4 Seasons ...
fetching id for Wiggle Wobble by Les Cooper and the Soul Rockers ...
fetching id for Return To Sender by Elvis Presley With The Jordanaires ...
--> [error] Zing! Went The Strings Of My Heart by The Furys
fetching id for Lovesick Blues by Frank Ifield ...
fetching id for Some Kinda Fun by Chris Montez ...
fetching id for You Are My Sunshine by Ray Charles ...
fetching id for Trouble Is My Middle Name by Bobby Vinton ...
fetching id for Molly by Bobby Goldsboro ...
fetching id for The Same Old Hurt by Burl Ives ...
fetching id for The Lone Teen Ranger by Jerry Landis ...
fetching id for Let's Kiss And Make Up by Bobby Vinton ...
fetching id for Chains by The Cookies ...
fetching id for The Lonely Bull (El Solo Torro) by Herb Alpert And Tijuana Brass ...
fetching id for Let's Go (pony) by The Routers ...
fetching id for My Wife Can't Cook by Lonnie Russ ...
fetching id for Darkest Street In Town by Jimmy Clanton ...
fetching id for Jellybread by Booker T. & The MG's ...
fetching id for Oo-La-La-Limbo by Danny & The Juniors ...
fetching id for Love Came To Me by Dion ...
fetching id for Dear Lonely Hearts by Nat King Cole ...
fetching id for The Love Of A Boy by Timi Yuro ...
fetching id for Keep Your Hands Off My Baby by Little Eva ...
fetching id for Don't Hang Up by The Orlons ...
fetching id for Ruby Ann by Marty Robbins ...
fetching id for (Dance With The) Guitar Man by Duane Eddy and the Rebelettes ...
--> [error] Oo-La-La-Limbo by Danny & The Juniors
fetching id for Ten Little Indians by The Beach Boys ...
fetching id for That's Life (That's Tough) by Gabriel And The Angels ...
fetching id for Coney Island Baby by The Excellents ...
fetching id for You're Gonna Need Me by Barbara Lynn ...
fetching id for Little Tin Soldier by The Toy Dolls ...
fetching id for Look At Me by Dobie Gray ...
fetching id for Red Pepper I by Roosevelt Fountain And Pens Of Rhythm ...
fetching id for Monsters' Holiday by Bobby "Boris" Pickett And The Crypt-Kickers ...
--> [error] Monsters' Holiday by Bobby "Boris" Pickett And The Crypt-Kickers
fetching id for Ride! by Dee Dee Sharp ...
fetching id for Your Cheating Heart by Ray Charles ...
--> [error] Red Pepper I by Roosevelt Fountain And Pens Of Rhythm
fetching id for Desafinado by Stan Getz/Charlie Byrd ...
fetching id for Comin' Home Baby by Mel Torme ...
fetching id for You Threw A Lucky Punch by Gene Chandler ...
fetching id for Echo by The Emotions ...
--> [error] Your Cheating Heart by Ray Charles
fetching id for Trouble In Mind by Aretha Franklin ...
fetching id for Big Boat by Peter, Paul & Mary ...
fetching id for Someone Somewhere by Junior Parker ...
fetching id for Slop Time by The Sherrys ...
fetching id for Rumors by Johnny Crawford ...
fetching id for The Push And Kick by Mark Valentino ...
fetching id for He's A Rebel by The Crystals ...
fetching id for All Alone Am I by Brenda Lee ...
fetching id for Spanish Lace by Gene McDaniels ...
fetching id for I May Not Live To See Tomorrow by Brian Hyland ...
fetching id for Me And My Shadow by Frank Sinatra & Sammy Davis Jr. ...
fetching id for Rainbow At Midnight by Jimmie Rodgers ...
--> [error] Rainbow At Midnight by Jimmie Rodgers
fetching id for Gonna Raise A Rukus Tonight by Jimmy Dean ...
--> [error] I May Not Live To See Tomorrow by Brian Hyland
fetching id for Sam's Song by Dean Martin & Sammy Davis Jr. ...
fetching id for The (Bossa Nova) Bird by The Dells ...
fetching id for Let Me Entertain You by Ray Anthony ...
--> [error] Gonna Raise A Rukus Tonight by Jimmy Dean
fetching id for Santa Claus Is Coming To Town by The 4 Seasons ...
fetching id for The Little Drummer Boy by The Harry Simeone Chorale ...
--> [error] Let Me Entertain You by Ray Anthony
fetching id for The Chipmunk Song (Christmas Don't Be Late) by David Seville And The Chipmunks ...
--> [error] The Little Drummer Boy by The Harry Simeone Chorale
fetching id for Santa Claus Is Watching You by Ray Stevens ...
fetching id for I Left My Heart In San Francisco by Tony Bennett ...
fetching id for Don't Go Near The Eskimos by Ben Colder ...
fetching id for Rudolph The Red Nosed Reindeer by David Seville And The Chipmunks ...
--> [error] The Chipmunk Song (Christmas Don't Be Late) by David Seville And The Chipmunks
fetching id for Lover Come Back To Me by The Cleftones ...
fetching id for Silent Night, Holy Night by Mahalia Jackson ...
fetching id for Eso Beso (That Kiss!) by Paul Anka ...
fetching id for My Own True Love by The Duprees ...
fetching id for The Cha-Cha-Cha by Bobby Rydell ...
fetching id for I Can't Help It (If I'm Still In Love With You) by Johnny Tillotson ...
fetching id for A Little Bit Now (A Little Bit Later) by The Majors ...
--> [error] Rudolph The Red Nosed Reindeer by David Seville And The Chipmunks
fetching id for Diddle-Dee-Dum (What Happens When Your Love Has Gone) by The Belmonts ...
fetching id for Road Hog by John D. Loudermilk ...
fetching id for She's A Troublemaker by The Majors ...
--> [error] A Little Bit Now (A Little Bit Later) by The Majors
fetching id for Alvin's Harmonica by David Seville And The Chipmunks ...
fetching id for White Christmas by The Drifters Featuring Clyde McPhatter And Bill Pinkney ...
fetching id for Jingle Bell Rock by Bobby Rydell/Chubby Checker ...
fetching id for Three Hearts In A Tangle by James Brown And The Famous Flames ...
fetching id for Twilight Time by Andy Williams ...
fetching id for Does He Mean That Much To You? by Eddy Arnold ...
--> [error] She's A Troublemaker by The Majors
fetching id for Next Door To An Angel by Neil Sedaka ...
fetching id for Only Love Can Break A Heart by Gene Pitney ...
fetching id for Mary Ann Regrets by Burl Ives ...
fetching id for Stubborn Kind Of Fellow by Marvin Gaye ...
fetching id for Lovers By Night, Strangers By Day by The Fleetwoods ...
fetching id for I Lost My Baby by Joey Dee ...
fetching id for Baby Has Gone Bye Bye by George Maharis ...
fetching id for Desafinado (Slightly Out Of Tune) by Pat Thomas ...
--> [error] Does He Mean That Much To You? by Eddy Arnold
fetching id for I Found A New Baby by Bobby Darin ...
fetching id for Limelight by Mr. Acker Bilk ...
--> [error] Desafinado (Slightly Out Of Tune) by Pat Thomas
fetching id for Theme From Taras Bulba (The Wishing Star) by Jerry Butler ...
fetching id for Gina by Johnny Mathis ...
fetching id for Nothing Can Change This Love by Sam Cooke ...
fetching id for That Stranger Used To Be My Girl by Trade Martin ...
fetching id for What Kind Of Fool Am I by Sammy Davis Jr. ...
fetching id for I've Got A Woman (Part I) by Jimmy McGriff ...
fetching id for Mama Sang A Song by Stan Kenton ...
--> [error] Limelight by Mr. Acker Bilk
fetching id for Close To Cathy by Mike Clifford ...
fetching id for Léah by Roy Orbison ...
fetching id for Mama Sang A Song by Walter Brennan ...
--> [error] Mama Sang A Song by Stan Kenton
fetching id for Popeye (The Hitchhiker) by Chubby Checker ...
fetching id for If You Were A Rock And Roll Record by Freddy Cannon ...
fetching id for The Jitterbug by The Dovells ...
fetching id for Still Waters Run Deep by Brook Benton ...
fetching id for Getting Ready For The Heartbreak by Chuck Jackson ...
--> [error] Mama Sang A Song by Walter Brennan
fetching id for Zero-Zero by Lawrence Welk ...
fetching id for Night Time by Pete Antell ...
fetching id for I Was Such A Fool (To Fall In Love With You) by Connie Francis ...
fetching id for James (Hold The Ladder Steady) by Sue Thompson ...
fetching id for Surfin' Safari by The Beach Boys ...
fetching id for Love Me Tender by Richard Chamberlain ...
fetching id for Don't Ask Me To Be Friends by The Everly Brothers ...
fetching id for I'll Bring It Home To You by Carla Thomas ...
fetching id for Workin' For The Man by Roy Orbison ...
fetching id for Torture by Kris Jensen ...
fetching id for Untie Me by The Tams ...
fetching id for Stormy Monday Blues by Bobby Bland ...
--> [error] Getting Ready For The Heartbreak by Chuck Jackson
fetching id for Anna (Go To Him) by Arthur Alexander ...
fetching id for Next Door To The Blues by Etta James ...
fetching id for Mr. Lonely by Buddy Greco ...
fetching id for Heart Breaker by Dean Christie ...
fetching id for I'm So Lonesome I Could Cry by Johnny Tillotson ...
fetching id for Somebody Have Mercy by Sam Cooke ...
fetching id for This Land Is Your Land by The New Christy Minstrels ...
fetching id for This Land Is Your Land by Ketty Lester ...
--> [error] Heart Breaker by Dean Christie
fetching id for You're A Sweetheart by Dinah Washington ...
fetching id for Pop Pop Pop - Pie by The Sherrys ...
--> [error] This Land Is Your Land by Ketty Lester
fetching id for Sherry by The 4 Seasons ...
fetching id for Patches by Dickey Lee ...
fetching id for Green Onions by Booker T. & The MG's ...
fetching id for Susie Darlin' by Tommy Roe ...
fetching id for Alley Cat by Bent Fabric and His Piano ...
--> [error] Pop Pop Pop - Pie by The Sherrys
fetching id for He Thinks I Still Care by Connie Francis ...
fetching id for I've Been Everywhere by Hank Snow ...
fetching id for You Can Run (But You Can't Hide) by Jerry Butler ...
fetching id for Heartaches by Patsy Cline ...
fetching id for Happy Weekend by Dave "Baby" Cortez ...
--> [error] He Thinks I Still Care by Connie Francis
fetching id for The Searching Is Over by Joe Henderson ...
--> [error] Happy Weekend by Dave "Baby" Cortez
fetching id for Dear Hearts And Gentle People by The Springfields ...
fetching id for Fiesta by Dave "Baby" Cortez ...
fetching id for Fools Rush In by Etta James ...
fetching id for Don't Stop The Wedding by Ann Cole ...
--> [error] The Searching Is Over by Joe Henderson
fetching id for If A Man Answers by Bobby Darin ...
fetching id for Don't You Believe It by Andy Williams ...
fetching id for I'm Going Back To School by Dee Clark ...
fetching id for I Remember You by Frank Ifield ...
fetching id for Ramblin' Rose by Nat King Cole ...
fetching id for Hide & Go Seek, Part I by Bunker Hill ...
--> [error] Don't Stop The Wedding by Ann Cole
fetching id for Twistin' With Linda by The Isley Brothers ...
--> [error] Hide & Go Seek, Part I by Bunker Hill
fetching id for The Alley Cat Song by David Thorne ...
--> [error] Twistin' With Linda by The Isley Brothers
fetching id for I'm Here To Get My Baby Out Of Jail by The Everly Brothers ...
fetching id for Four Walls by Kay Starr ...
fetching id for Mama Sang A Song by Bill Anderson ...
fetching id for Cold, Cold Heart by Dinah Washington ...
fetching id for One More Town by The Kingston Trio ...
fetching id for Aladdin by Bobby Curtola ...
fetching id for Let's Dance by Chris Montez ...
fetching id for Warmed Over Kisses (Left Over Love) by Brian Hyland ...
fetching id for Little Black Book by Jimmy Dean ...
fetching id for Venus In Blue Jeans by Jimmy Clanton ...
fetching id for Baby Face by Bobby Darin ...
--> [error] The Alley Cat Song by David Thorne
fetching id for No One Will Ever Know by Jimmie Rodgers ...
fetching id for The Burning Of Atlanta by Claude King ...
fetching id for Second Fiddle Girl by Barbara Lynn ...
fetching id for I'll Remember Carol by Tommy Boyce ...
fetching id for Don't Ever Leave Me by Bob And Earl ...
fetching id for Magic Wand by Don & Juan ...
fetching id for What Kind Of Fool Am I? by Robert Goulet ...
fetching id for I Left My Heart In The Balcony by Linda Scott ...
--> [error] Don't Ever Leave Me by Bob And Earl
fetching id for Don't Go Near The Indians by Rex Allen ...
fetching id for Rain Rain Go Away by Bobby Vinton ...
--> [error] I Left My Heart In The Balcony by Linda Scott
fetching id for If I Had A Hammer (The Hammer Song) by Peter, Paul & Mary ...
fetching id for Ten Lonely Guys by Pat Boone ...
fetching id for King Of The Whole Wide World by Elvis Presley With The Jordanaires ...
fetching id for Did You Ever See A Dream Walking by Fats Domino ...
fetching id for Father Knows Best by The Radiants ...
--> [error] Ten Lonely Guys by Pat Boone
fetching id for Hully Gully Baby by The Dovells ...
fetching id for Lie To Me by Brook Benton ...
fetching id for What Kind Of Love Is This by Joey Dee & the Starliters ...
fetching id for You Beat Me To The Punch by Mary Wells ...
fetching id for You Belong To Me by The Duprees ...
fetching id for Stop The Music by The Shirelles ...
fetching id for Sheila by Tommy Roe ...
fetching id for Sweet Sixteen Bars by Earl Grant ...
fetching id for Save All Your Lovin' For Me by Brenda Lee ...
--> [error] Father Knows Best by The Radiants
fetching id for What Kind Of Fool Am I by Anthony Newley ...
fetching id for Further More by Ray Stevens ...
--> [error] Save All Your Lovin' For Me by Brenda Lee
fetching id for Where Do You Come From by Elvis Presley With The Jordanaires ...
--> [error] Further More by Ray Stevens
fetching id for Punish Her by Bobby Vee ...
fetching id for Teen Age Idol by Rick Nelson ...
fetching id for It Might As Well Rain Until September by Carole King ...
fetching id for Beechwood 4-5789 by The Marvelettes ...
fetching id for Come On Little Angel by The Belmonts ...
fetching id for The Things We Did Last Summer by Shelley Fabares ...
fetching id for A Wonderful Dream by The Majors ...
fetching id for A Taste Of Honey by Martin Denny and His Orchestra ...
--> [error] Teen Age Idol by Rick Nelson
fetching id for You Can't Judge A Book By The Cover by Bo Diddley ...
fetching id for When The Boys Get Together by Joanie Sommers ...
fetching id for I'm Gonna Change Everything by Jim Reeves ...
fetching id for Limbo Dance by The Champs ...
fetching id for The Loco-Motion by Little Eva ...
fetching id for Rinky Dink by Baby Cortez ...
fetching id for If I Didn't Have A Dime (To Play The Jukebox) by Gene Pitney ...
fetching id for The Swiss Maid by Del Shannon ...
fetching id for I Keep Forgettin' by Chuck Jackson ...
fetching id for What Time Is It? by The Jive Five With Eugene Pitts ...
--> [error] When The Boys Get Together by Joanie Sommers
fetching id for ...And Then There Were Drums by Sandy Nelson ...
fetching id for 409 by The Beach Boys ...
fetching id for Forever And A Day by Jackie Wilson ...
fetching id for You Can't Lie To A Liar by Ketty Lester ...
fetching id for Hail To The Conquering Hero by James Darren ...
--> [error] You Can't Lie To A Liar by Ketty Lester
fetching id for Hully Gully Guitar by Jerry Reed And The Hully Girlies ...
--> [error] Hail To The Conquering Hero by James Darren
fetching id for Silver Threads And Golden Needles by The Springfields ...
fetching id for She's Not You by Elvis Presley With The Jordanaires ...
fetching id for Devil Woman by Marty Robbins ...
fetching id for You Don't Know Me by Ray Charles ...
fetching id for Shame On Me by Bobby Bare ...
fetching id for Point Of No Return by Gene McDaniels ...
fetching id for A Swingin' Safari by Billy Vaughn And His Orchestra ...
fetching id for Send Me The Pillow You Dream On by Johnny Tillotson ...
fetching id for Your Nose Is Gonna Grow by Johnny Crawford ...
fetching id for Party Lights by Claudine Clark ...
fetching id for Lollipops And Roses by Paul Petersen ...
fetching id for Don't You Worry by Don Gardner And Dee Dee Ford ...
--> [error] Hully Gully Guitar by Jerry Reed And The Hully Girlies
fetching id for Long As The Rose Is Red by Florraine Darlin ...
fetching id for I Love You The Way You Are by Bobby Vinton ...
fetching id for Lookin' For A Love by The Valentinos ...
fetching id for Papa-Oom-Mow-Mow by The Rivingtons ...
fetching id for Mashed Potatoes U.S.A. by James Brown And The Famous Flames ...
fetching id for I'm The Girl From Wolverton Mountain by Jo Ann Campbell ...
fetching id for What's Gonna Happen When Summer's Done by Freddy Cannon ...
fetching id for The Boys' Night Out by Patti Page ...
--> [error] Long As The Rose Is Red by Florraine Darlin
fetching id for Big Love by Joe Henderson ...
fetching id for Yield Not To Temptation by Bobby Bland ...
fetching id for Every Night (Without You) by Paul Anka ...
fetching id for Ol' Man River by Jimmy Smith ...
fetching id for Broken Heart by The Fiestas ...
fetching id for Way Over There by The Miracles ...
fetching id for Sweet Little Sixteen by Jerry Lee Lewis ...
fetching id for Someday (When I'm Gone From You) by Bobby Vee and The Crickets ...
--> [error] The Boys' Night Out by Patti Page
fetching id for Try A Little Tenderness by Aretha Franklin ...
fetching id for Stop The Wedding by Etta James ...
fetching id for What's A Matter Baby (Is It Hurting You) by Timi Yuro ...
fetching id for Things by Bobby Darin ...
fetching id for Mr. Songwriter by Connie Stevens ...
fetching id for Baby Elephant Walk by Lawrence Welk And His Orchestra ...
fetching id for Vacation by Connie Francis ...
fetching id for Till Death Do Us Part by Bob Braun ...
--> [error] Baby Elephant Walk by Lawrence Welk And His Orchestra
fetching id for Glory Of Love by Don Gardner And Dee Dee Ford ...
fetching id for I Really Don't Want To Know by Solomon Burke ...
fetching id for Roses Are Red (My Love) by Bobby Vinton ...
fetching id for Little Diane by Dion ...
fetching id for Twist And Shout by The Isley Brothers ...
fetching id for Make It Easy On Yourself by Jerry Butler ...
fetching id for Bring It On Home To Me by Sam Cooke ...
fetching id for Call Me Mr. In-Between by Burl Ives ...
fetching id for Lolita Ya-Ya by The Ventures ...
fetching id for (Theme from) A Summer Place by Dick Roman ...
--> [error] Till Death Do Us Part by Bob Braun
fetching id for Beach Party by King Curtis And The Noble Knights ...
--> [error] (Theme from) A Summer Place by Dick Roman
fetching id for There Is No Greater Love by The Wanderers ...
--> [error] Beach Party by King Curtis And The Noble Knights
fetching id for Copy Cat by Gary U.S. Bonds ...
fetching id for Bonanza! by Johnny Cash ...
fetching id for Your Heart Belongs To Me by The Supremes ...
fetching id for Ahab, The Arab by Ray Stevens ...
fetching id for Sealed With A Kiss by Brian Hyland ...
fetching id for You'll Lose A Good Thing by Barbara Lynn ...
fetching id for Speedy Gonzales by Pat Boone ...
fetching id for Just Tell Her Jim Said Hello by Elvis Presley With The Jordanaires ...
--> [error] There Is No Greater Love by The Wanderers
fetching id for Heart In Hand by Brenda Lee ...
fetching id for The Wah Watusi by The Orlons ...
fetching id for Love Me As I Love You by George Maharis ...
fetching id for Wolverton Mountain by Claude King ...
fetching id for Jivin' Around by Al Casey Combo ...
fetching id for Reap What You Sow by Billy Stewart ...
fetching id for I Want To Be Loved by Dinah Washington ...
fetching id for Silly Boy (She Doesn't Love You) by The Lettermen ...
fetching id for Send For Me (If you need some Lovin) by Barbara George ...
fetching id for Mama (He Treats Your Daughter Mean) by Ruth Brown ...
fetching id for The Stripper by David Rose and His Orchestra ...
--> [error] Jivin' Around by Al Casey Combo
fetching id for (Girls, Girls, Girls) Made To Love by Eddie Hodges ...
fetching id for Theme From Dr. Kildare (Three Stars Will Shine Tonight) by Richard Chamberlain ...
fetching id for I Can't Stop Loving You by Ray Charles ...
fetching id for The Ballad Of Paladin by Duane Eddy ...
fetching id for Having A Party by Sam Cooke ...
fetching id for Too Late To Worry - Too Blue To Cry by Glen Campbell ...
fetching id for A Taste Of Honey by The Victor Feldman Quartet ...
fetching id for Oh! What It Seemed To Be by The Castells ...
fetching id for I Wouldn't Know (What To Do) by Dinah Washington ...
fetching id for For All We Know by Dinah Washington ...
fetching id for Johnny Get Angry by Joanie Sommers ...
fetching id for Dancin' Party by Chubby Checker ...
fetching id for I Need Your Loving by Don Gardner And Dee Dee Ford ...
fetching id for Have A Good Time by Sue Thompson ...
fetching id for So Wrong by Patsy Cline ...
fetching id for I'm Coming Home by Paul Anka ...
--> [error] A Taste Of Honey by The Victor Feldman Quartet
fetching id for Beach Party by Dave York and The Beachcombers ...
--> [error] I'm Coming Home by Paul Anka
fetching id for Till There Was You by Valjean on Piano ...
--> [error] Beach Party by Dave York and The Beachcombers
fetching id for Route 66 Theme by Nelson Riddle ...
fetching id for Little Red Rented Rowboat by Joe Dowell ...
fetching id for Gravy (For My Mashed Potatoes) by Dee Dee Sharp ...
fetching id for It Started All Over Again by Brenda Lee ...
[... over a thousand lines of console output elided: the cell iterates over early-1960s chart entries, printing "fetching id for <title> by <artist> ..." for each lookup and "--> [error] <title> by <artist>" whenever an id cannot be found ...]
fetching id for Shop Around by The Miracles (featuring Bill "Smokey" Robinson) ...
--> [error] Little Turtle Dove by Otis Williams And His Charms
fetching id for Will You Love Me Tomorrow by The Shirelles ...
fetching id for Let's Go Again (Where We Went Last Night) by Hank Ballard And The Midnighters ...
fetching id for Wheels by Billy Vaughn And His Orchestra ...
--> [error] Shop Around by The Miracles (featuring Bill "Smokey" Robinson)
fetching id for Wings Of A Dove by Ferlin Husky ...
fetching id for For My Baby by Brook Benton ...
fetching id for Lonely Man by Elvis Presley With The Jordanaires ...
fetching id for Calendar Girl by Neil Sedaka ...
fetching id for More Than I Can Say by Bobby Vee ...
fetching id for Stayin' In by Bobby Vee ...
fetching id for Orange Blossom Special by Billy Vaughn And His Orchestra ...
--> [error] Wheels by Billy Vaughn And His Orchestra
fetching id for Top Forty, News, Weather And Sports by Mark Dinning ...
fetching id for Green Grass Of Texas by The Texans ...
fetching id for What A Price by Fats Domino ...
fetching id for Ram-Bunk-Shush by The Ventures ...
fetching id for Emotions by Brenda Lee ...
fetching id for Pony Time by The Goodtimers ...
--> [error] Orange Blossom Special by Billy Vaughn And His Orchestra
fetching id for It's Unbelievable by The Larks ...
--> [error] Pony Time by The Goodtimers
fetching id for Theme from Tunes Of Glory by The Cambridge Strings And Singers ...
fetching id for Jimmy's Girl by Johnny Tillotson ...
fetching id for All In My Mind by Maxine Brown ...
fetching id for All Of Everything by Frankie Avalon ...
--> [error] All Of Everything by Frankie Avalon
fetching id for Utopia by Frank Gari ...
--> [error] Theme from Tunes Of Glory by The Cambridge Strings And Singers
fetching id for Wait A Minute by The Coasters ...
fetching id for (I Wanna) Love My Life Away by Gene Pitney ...
fetching id for Memphis by Donnie Brooks ...
fetching id for Milord by Edith Piaf ...
fetching id for Ain't That Just Like A Woman by Fats Domino ...
fetching id for Angel On My Shoulder by Shelby Flint ...
fetching id for Havin' Fun by Dion ...
fetching id for Cerveza by Bert Kaempfert And His Orchestra ...
--> [error] Utopia by Frank Gari
fetching id for Won't Be Long by Aretha Franklin With The Ray Bryant Combo ...
fetching id for Apache by Sonny James ...
--> [error] Cerveza by Bert Kaempfert And His Orchestra
fetching id for Canadian Sunset by Etta Jones ...
fetching id for Oh Mein Papa by Dick Lee ...
--> [error] Apache by Sonny James
fetching id for Early Every Morning (Early Every Evening Too) by Dinah Washington ...
fetching id for I Lied To My Heart by The Enchanters ...
fetching id for Cowboy Jimmy Joe (Die Sterne Der Prarie) by Lolita ...
--> [error] Oh Mein Papa by Dick Lee
fetching id for The Story Of My Love by Paul Anka ...
--> [error] Cowboy Jimmy Joe (Die Sterne Der Prarie) by Lolita
fetching id for No One by Connie Francis ...
fetching id for Wonderland By Night by Bert Kaempfert And His Orchestra ...
fetching id for Pepe by Duane Eddy His Twangy Guitar And The Rebels ...
--> [error] The Story Of My Love by Paul Anka
fetching id for Angel Baby by Rosie And The Originals ...
fetching id for My Empty Arms by Jackie Wilson ...
fetching id for The Tear Of The Year by Jackie Wilson ...
fetching id for Cherié by Bobby Rydell ...
--> [error] Pepe by Duane Eddy His Twangy Guitar And The Rebels
fetching id for (Ghost) Riders In The Sky by Ramrods ...
fetching id for At Last by Etta James ...
fetching id for When I Fall In Love by Etta Jones ...
fetching id for C'est Si Bon (It's So Good) by Conway Twitty ...
fetching id for Keep Your Hands Off Of Him by Damita Jo ...
fetching id for Tunes Of Glory by Mitch Miller With Orchestra And Chorus ...
--> [error] Cherié by Bobby Rydell
fetching id for The Most Beautiful Words by Della Reese ...
fetching id for A Texan And A Girl From Mexico by Anita Bryant ...
fetching id for Ja-Da by Johnny And The Hurricanes ...
fetching id for Rubber Ball by Bobby Vee ...
fetching id for Once In Awhile by The Chimes ...
--> [error] A Texan And A Girl From Mexico by Anita Bryant
fetching id for I Count The Tears by The Drifters ...
fetching id for There She Goes by Jerry Wallace ...
fetching id for The Age For Love by Jimmy Charles ...
fetching id for Are You Lonesome To-night? by Elvis Presley With The Jordanaires ...
--> [error] Once In Awhile by The Chimes
fetching id for What Would I Do by Mickey & Sylvia ...
--> [error] Are You Lonesome To-night? by Elvis Presley With The Jordanaires
fetching id for Corinna, Corinna by Ray Peterson ...
fetching id for Leave My Kitten Alone by Little Willie John ...
fetching id for You're The Boss by LaVern Baker & Jimmy Ricks ...
fetching id for Dream Boy by Annette With The Afterbeats ...
--> [error] What Would I Do by Mickey & Sylvia
fetching id for If I Didn't Care by The Platters ...
fetching id for Leave My Kitten Alone by Johnny Preston ...
fetching id for The Exodus Song (This Land Is Mine) by Pat Boone ...
fetching id for A Lover's Question by Ernestine Anderson ...
fetching id for Cherry Berry Wine by Charlie Mccoy ...
--> [error] A Lover's Question by Ernestine Anderson
fetching id for Battle Of Gettysburg by Fred Darian ...
fetching id for Lost Love by H.B. Barnum ...
fetching id for North To Alaska by Johnny Horton ...
fetching id for I'm Learning About Love by Brenda Lee ...
fetching id for Last Date by Floyd Cramer ...
fetching id for Close Together by Jimmy Reed ...
fetching id for Sailor (Your Home Is The Sea) by Lolita ...
fetching id for Them That Got by Ray Charles and his Orchestra ...
--> [error] Cherry Berry Wine by Charlie Mccoy
fetching id for The Magnificent Seven by Al Caiola And His Orchestra ...
--> [error] Them That Got by Ray Charles and his Orchestra
fetching id for Cherry Pink And Apple Blossom White by Jerry Murad's Harmonicats ...
--> [error] The Magnificent Seven by Al Caiola And His Orchestra
fetching id for Charlena by The Sevilles ...
fetching id for Dance By The Light Of The Moon by The Olympics ...
fetching id for A Thousand Stars by Kathy Young With The Innocents ...
--> [error] Cherry Pink And Apple Blossom White by Jerry Murad's Harmonicats
fetching id for Don't Believe Him, Donna by Lenny Miles ...
fetching id for The Hoochi Coochi Coo by Hank Ballard And The Midnighters ...
fetching id for First Taste Of Love by Ben E. King ...
fetching id for Dedicated To The One I Love by The "5" Royales ...
fetching id for Muskrat Ramble by Freddy Cannon ...
fetching id for Main Theme from Exodus (Ari's Theme) by Mantovani & His Orch. ...
--> [error] Don't Believe Him, Donna by Lenny Miles
fetching id for What Am I Gonna Do by Jimmy Clanton ...
fetching id for Wonderland By Night by Louis Prima ...
fetching id for You Are The Only One by Ricky Nelson ...
fetching id for Don't Let Him Shop Around by Debbie Dean ...
fetching id for Lovey Dovey by Buddy Knox ...
fetching id for Calcutta by The Four Preps ...
fetching id for What About Me by Don Gibson ...
fetching id for Baby Oh Baby by The Shells ...
fetching id for He Will Break Your Heart by Jerry Butler ...
fetching id for Wonderland By Night by Anita Bryant ...
--> [error] Main Theme from Exodus (Ari's Theme) by Mantovani & His Orch.
fetching id for Yes, I'm Lonesome Tonight by Thelma Carpenter ...
--> [error] Wonderland By Night by Anita Bryant
fetching id for Yes, I'm Lonesome Tonight by Dodie Stevens ...
fetching id for Sound-Off by Titus Turner ...
fetching id for You're Sixteen by Johnny Burnette ...
fetching id for Many Tears Ago by Connie Francis ...
fetching id for Doll House by Donnie Brooks ...
fetching id for Stay by Maurice Williams & The Zodiacs ...
fetching id for Lonely Teenager by Dion ...
fetching id for My Last Date (With You) by Skeeter Davis ...
fetching id for Blue Tango by Bill Black's Combo ...
fetching id for Gee Whiz by The Innocents ...
fetching id for I'm Hurtin' by Roy Orbison ...
fetching id for My Last Date (With You) by Joni James ...
--> [error] Yes, I'm Lonesome Tonight by Thelma Carpenter
fetching id for I Remember by Maurice Williams & The Zodiacs ...
fetching id for Sugar Bee by Cleveland Crochet and Band ...
--> [error] My Last Date (With You) by Joni James
fetching id for Chills And Fever by Ronnie Love And His Orchestra ...
fetching id for Trouble In Mind by Nina Simone ...
fetching id for My Girl Josephine by Fats Domino ...
fetching id for Flamingo Express by The Royaltones ...
--> [error] Chills And Fever by Ronnie Love And His Orchestra
fetching id for I Gotta Know by Elvis Presley With The Jordanaires ...
--> [error] Flamingo Express by The Royaltones
fetching id for Bumble Bee by LaVern Baker ...
fetching id for Perfidia by The Ventures ...
fetching id for Don't Read The Letter by Patti Page ...
fetching id for Happy Days by Marv Johnson ...
fetching id for Last Date by Lawrence Welk And His Orchestra ...
--> [error] Don't Read The Letter by Patti Page
fetching id for Sad Mood by Sam Cooke ...
fetching id for You Don't Want My Love by Andy Williams ...
fetching id for Gift Of Love by Van Dykes ...
fetching id for Tonite, Tonite by Mello-Kings ...
fetching id for The Puppet Song by Frankie Avalon ...
--> [error] Last Date by Lawrence Welk And His Orchestra
fetching id for Oh, How I Miss You Tonight by Jeanne Black ...
fetching id for In The Still Of The Nite by The Five Satins ...
fetching id for Sway by Bobby Rydell ...
fetching id for Poetry In Motion by Johnny Tillotson ...
fetching id for Ruby by Ray Charles ...
fetching id for A Perfect Love by Frankie Avalon ...
fetching id for Gonzo by James Booker ...
fetching id for Alone At Last by Jackie Wilson ...
fetching id for Walk Slow by Little Willie John ...
fetching id for Your Other Love by The Flamingos ...
fetching id for I Don't Want Nobody (To Have My Love But You) by Ella Johnson With Buddy Johnson ...
--> [error] The Puppet Song by Frankie Avalon
fetching id for Milk Cow Blues by Ricky Nelson ...
fetching id for Wabash Blues by The Viscounts ...
fetching id for You've Got To Love Her With A Feeling by Freddy King ...
fetching id for Fools Rush In (Where Angels Fear To Tread) by Brook Benton ...
fetching id for And The Heavens Cried by Ronnie Savoy ...
--> [error] Milk Cow Blues by Ricky Nelson
fetching id for New Orleans by U.S. Bonds ...
fetching id for Ballad Of The Alamo by Marty Robbins ...
fetching id for How To Handle A Woman by Johnny Mathis ...
fetching id for (Let's Do) The Hully Gully Twist by Bill Doggett ...
--> [error] And The Heavens Cried by Ronnie Savoy
fetching id for We Have Love by Dinah Washington ...
fetching id for If I Knew by Nat King Cole ...
fetching id for Is There Something On Your Mind by Jack Scott ...
fetching id for I'll Save The Last Dance For You by Damita Jo ...
fetching id for Oh Lonesome Me by Johnny Cash With The Gene Lowery Singers ...
fetching id for Rockin', Rollin' Ocean by Hank Snow ...
fetching id for Spoonful by Etta & Harvey ...
fetching id for The Hucklebuck by Chubby Checker ...
fetching id for Am I Losing You by Jim Reeves ...
--> [error] (Let's Do) The Hully Gully Twist by Bill Doggett
fetching id for Twistin' Bells by Santo & Johnny ...
fetching id for Christmas Auld Lang Syne by Bobby Darin ...
fetching id for Let's Go, Let's Go, Let's Go by Hank Ballard And The Midnighters ...
fetching id for Like Strangers by The Everly Brothers ...
fetching id for Ol' Mac Donald by Frank Sinatra ...
fetching id for Save The Last Dance For Me by The Drifters ...
fetching id for Mister Livingston by Larry Verne ...
fetching id for Make Someone Happy by Perry Como ...
--> [error] Ol' Mac Donald by Frank Sinatra
fetching id for I Idolize You by Ike & Tina Turner ...
fetching id for Talk To Me Baby by Annette With The Afterbeats ...
--> [error] Make Someone Happy by Perry Como
fetching id for This Is My Story by Mickey & Sylvia ...
fetching id for Adeste Fideles (Oh, Come, All Ye Faithful) by Bing Crosby ...
fetching id for Silent Night by Bing Crosby ...
fetching id for Am I The Man by Jackie Wilson ...
fetching id for Natural Born Lover by Fats Domino ...
fetching id for Rudolph The Red Nosed Reindeer by The Melodeers ...
--> [error] This Is My Story by Mickey & Sylvia
fetching id for Rambling by The Ramblers ...
fetching id for Gloria's Theme by Adam Wade ...
fetching id for Send Me The Pillow You Dream On by The Browns Featuring Jim Edward Brown ...
fetching id for Ramona by The Blue Diamonds ...
fetching id for Have You Ever Been Lonely (Have You Ever Been Blue) by Teresa Brewer ...
fetching id for Someday You'll Want Me To Want You by Brook Benton ...
fetching id for Child Of God by Bobby Darin ...
fetching id for Georgia On My Mind by Ray Charles ...
fetching id for I Want To Be Wanted by Brenda Lee ...
fetching id for Don't Go To Strangers by Etta Jones ...
fetching id for Sleep by Little Willie John ...
fetching id for Blue Angel by Roy Orbison ...
fetching id for The Bells by James Brown ...
fetching id for To Each His Own by The Platters ...
fetching id for I Missed Me by Jim Reeves ...
fetching id for Ruby Duby Du by Tobin Mathews & Co. ...
--> [error] Rudolph The Red Nosed Reindeer by The Melodeers
fetching id for Blue Christmas by The Browns Featuring Jim Edward Brown ...
fetching id for You Talk Too Much by Joe Jones ...
fetching id for Artificial Flowers by Bobby Darin ...
fetching id for Alabam by Pat Boone ...
fetching id for Togetherness by Frankie Avalon ...
fetching id for My Dearest Darling by Etta James ...
fetching id for Dear John by Pat Boone ...
--> [error] Alabam by Pat Boone
fetching id for Wait For Me by The Playmates ...
--> [error] Dear John by Pat Boone
fetching id for Groovy Tonight by Bobby Rydell ...
--> [error] Wait For Me by The Playmates
fetching id for Ruby Duby Du From Key Witness by Charles Wolcott ...
--> [error] Groovy Tonight by Bobby Rydell
fetching id for Alabam by Cowboy Copas ...
fetching id for Gee by Jan & Dean ...
fetching id for Hardhearted Hannah by Ray Charles ...
--> [error] Ruby Duby Du From Key Witness by Charles Wolcott
fetching id for The Green Leaves Of Summer by The Brothers Four ...
fetching id for (You Better) Know What You're Doin' by Lloyd Price and His Orchestra ...
--> [error] Hardhearted Hannah by Ray Charles
fetching id for The Big Time Spender (Parts I & II) by Cornbread & Biscuits ...
fetching id for Ballad Of The Alamo by Bud & Travis ...
--> [error] The Big Time Spender (Parts I & II) by Cornbread & Biscuits
fetching id for Devil Or Angel by Bobby Vee ...
fetching id for Don't Be Cruel by Bill Black's Combo ...
fetching id for Peter Gunn by Duane Eddy His Twangy Guitar And The Rebels ...
--> [error] Ballad Of The Alamo by Bud & Travis
fetching id for My Heart Has A Mind Of Its Own by Connie Francis ...
fetching id for Summer's Gone by Paul Anka ...
fetching id for Let's Think About Living by Bob Luman ...
fetching id for Love Walked In by Dinah Washington ...
fetching id for The Sundowners by Billy Vaughn And His Orchestra ...
--> [error] Peter Gunn by Duane Eddy His Twangy Guitar And The Rebels
fetching id for Cry Cry Cry by Bobby Bland ...
fetching id for Theme From The Apartment by Ferrante & Teicher ...
fetching id for Serenata by Sarah Vaughan ...
fetching id for You Are My Sunshine by Johnny And The Hurricanes ...
fetching id for Sweet Dreams by Don Gibson ...
fetching id for Psycho by Bobby Hendricks ...
fetching id for Diamonds And Pearls by The Paradons ...
fetching id for Little Miss Blue by Dion ...
fetching id for Chain Gang by Sam Cooke ...
fetching id for Whole Lot Of Shakin' Going On by Conway Twitty ...
--> [error] The Sundowners by Billy Vaughn And His Orchestra
fetching id for Kiddio by Brook Benton ...
fetching id for Tonights The Night by The Shirelles ...
fetching id for A Million To One by Jimmy Charles and The Revelletts ...
--> [error] Whole Lot Of Shakin' Going On by Conway Twitty
fetching id for Night Theme by The Mark II ...
fetching id for Have Mercy Baby by The Bobbettes ...
--> [error] Night Theme by The Mark II
fetching id for Whole Lotta Shakin' Goin' On by Chubby Checker ...
fetching id for Stranger From Durango by Richie Allen ...
--> [error] Have Mercy Baby by The Bobbettes
fetching id for Theme From The Sundowners by Felix Slatkin Orchestra and Chorus ...
--> [error] Stranger From Durango by Richie Allen
fetching id for (Theme from) "The Dark At The Top Of The Stairs" by Ernie Freeman ...
--> [error] Theme From The Sundowners by Felix Slatkin Orchestra and Chorus
fetching id for It's Now Or Never by Elvis Presley With The Jordanaires ...
fetching id for So Sad (To Watch Good Love Go Bad) by The Everly Brothers ...
fetching id for Mr. Custer by Larry Verne ...
fetching id for A Fool In Love by Ike & Tina Turner ...
fetching id for Somebody To Love by Bobby Darin ...
fetching id for Shimmy Like Kate by The Olympics ...
fetching id for Side Car Cycle by Charlie Ryan and the Timberline Riders ...
--> [error] (Theme from) "The Dark At The Top Of The Stairs" by Ernie Freeman
fetching id for Anymore by Teresa Brewer ...
fetching id for Push Push by Austin Taylor ...
fetching id for One Of The Lucky Ones by Anita Bryant ...
--> [error] Push Push by Austin Taylor
fetching id for Fallen Angel by Webb Pierce ...
--> [error] One Of The Lucky Ones by Anita Bryant
fetching id for (You've Got To) Move Two Mountains by Marv Johnson ...
fetching id for Three Nights A Week by Fats Domino ...
fetching id for Twistin' U.S.A. by Danny & The Juniors ...
fetching id for Walk -- Don't Run by The Ventures ...
fetching id for I Wish I'd Never Been Born by Patti Page ...
--> [error] Fallen Angel by Webb Pierce
fetching id for Run Samson Run by Neil Sedaka ...
fetching id for That's How Much by Brian Hyland ...
fetching id for Pineapple Princess by Annette With The Afterbeats ...
--> [error] I Wish I'd Never Been Born by Patti Page
fetching id for Lucille by The Everly Brothers ...
fetching id for Finger Poppin' Time by Hank Ballard And The Midnighters ...
fetching id for If She Should Come To You (La Montana) by Anthony Newley ...
fetching id for Patsy by Jack Scott ...
fetching id for (I Do The) Shimmy Shimmy by Bobby Freeman ...
fetching id for Senza Mamma (With No One) by Connie Francis ...
fetching id for Midnight Lace by Ray Ellis ...
fetching id for Everglades by The Kingston Trio ...
fetching id for Kissin' And Twistin' by Fabian ...
--> [error] Midnight Lace by Ray Ellis
fetching id for (Theme From) The Sundowners by Mantovani & His Orchestra ...
--> [error] Kissin' And Twistin' by Fabian
fetching id for Midnight Lace - Part I by Ray Conniff His Orchestra And Chorus ...
--> [error] (Theme From) The Sundowners by Mantovani & His Orchestra
fetching id for A Thousand Miles Away by The Heartbeats ...
fetching id for My Love For You by Johnny Mathis ...
fetching id for The Same One by Brook Benton ...
fetching id for Just A Little by Brenda Lee ...
fetching id for You Mean Everything To Me by Neil Sedaka ...
fetching id for Let's Have A Party by Wanda Jackson ...
fetching id for Dreamin' by Johnny Burnette ...
fetching id for Humdinger by Freddy Cannon ...
fetching id for The Twist by Hank Ballard And The Midnighters ...
fetching id for I'm Sorry by Brenda Lee ...
fetching id for The Girl With The Story In Her Eyes by Safaris with The Phantom's Band ...
fetching id for Temptation by Roger Williams ...
fetching id for Irresistable You by Bobby Peterson ...
--> [error] Midnight Lace - Part I by Ray Conniff His Orchestra And Chorus
fetching id for Volare by Bobby Rydell ...
fetching id for I'm Not Afraid by Ricky Nelson ...
fetching id for Mission Bell by Donnie Brooks ...
fetching id for (The Clickity Clack Song) Four Little Heels by Brian Hyland ...
--> [error] Irresistable You by Bobby Peterson
fetching id for My Hero by The Blue Notes ...
fetching id for Shoppin' For Clothes by The Coasters ...
fetching id for Dance With Me Georgie by The Bobbettes ...
--> [error] (The Clickity Clack Song) Four Little Heels by Brian Hyland
fetching id for Only The Lonely (Know How I Feel) by Roy Orbison ...
fetching id for Midnight Lace by David Carroll And His Orchestra ...
--> [error] Dance With Me Georgie by The Bobbettes
fetching id for Isn't It Amazing by The Crests featuring Johnny Mastro ...
fetching id for Honest I Do by The Innocents ...
fetching id for Malagueña by Connie Francis ...
fetching id for Yes Sir, That's My Baby by Ricky Nelson ...
fetching id for Hello Young Lovers by Paul Anka ...
--> [error] Midnight Lace by David Carroll And His Orchestra
fetching id for Ta Ta by Clyde McPhatter ...
fetching id for If I Can't Have You by Etta & Harvey ...
fetching id for You're Looking Good by Dee Clark ...
fetching id for Hush-Hush by Jimmy Reed ...
fetching id for Time Machine by Dante and the Evergreens ...
--> [error] Hello Young Lovers by Paul Anka
fetching id for It's Not The End Of Everything by Tommy Edwards ...
fetching id for Don't Let Love Pass Me By by Frankie Avalon ...
fetching id for You Talk Too Much by Frankie Ford ...
--> [error] Don't Let Love Pass Me By by Frankie Avalon
fetching id for Please Help Me, I'm Falling by Hank Locklin ...
fetching id for Harmony by Billy Bland ...
fetching id for The Last One To Know by The Fleetwoods ...
fetching id for Yogi by The Ivy Three ...
fetching id for Itsy Bitsy Teenie Weenie Yellow Polkadot Bikini by Brian Hyland ...
--> [error] You Talk Too Much by Frankie Ford
fetching id for In My Little Corner Of The World by Anita Bryant ...
fetching id for Over The Rainbow by The Demensions ...
fetching id for Hot Rod Lincoln by Johnny Bond ...
fetching id for I Love You In The Same Old Way by Paul Anka ...
fetching id for Come Back by Jimmy Clanton ...
fetching id for Rocking Goose by Johnny And The Hurricanes ...
fetching id for Put Your Arms Around Me Honey by Fats Domino ...
fetching id for Wait by Jimmy Clanton ...
fetching id for A Kookie Little Paradise by Jo Ann Campbell ...
--> [error] Itsy Bitsy Teenie Weenie Yellow Polkadot Bikini by Brian Hyland
fetching id for Big Boy Pete by The Olympics ...
fetching id for (You Were Made For) All My Love by Jackie Wilson ...
fetching id for Hot Rod Lincoln by Charlie Ryan and the Timberline Riders ...
--> [error] A Kookie Little Paradise by Jo Ann Campbell
fetching id for Image Of A Girl by Safaris with The Phantom's Band ...
fetching id for A Mess Of Blues by Elvis Presley With The Jordanaires ...
fetching id for Let The Good Times Roll by Shirley & Lee ...
fetching id for My Shoes Keep Walking Back To You by Guy Mitchell ...
fetching id for I Walk The Line by Jaye P. Morgan ...
fetching id for (I Can't Help You) I'm Falling Too by Skeeter Davis ...
fetching id for Is You Is Or Is You Ain't My Baby by Buster Brown ...
fetching id for Five Brothers by Marty Robbins ...
fetching id for Nice 'N' Easy by Frank Sinatra ...
fetching id for Over The Mountain; Across The Sea by Johnnie & Joe ...
fetching id for Just Call Me (And I'll Understand) by Lloyd Price and His Orchestra ...
--> [error] I Walk The Line by Jaye P. Morgan
fetching id for And Now by Della Reese ...
fetching id for The Lovin' Touch by Mark Dinning ...
fetching id for Tonight's The Night by The Chiffons ...
fetching id for Journey Of Love by The Crests featuring Johnny Mastro ...
fetching id for A Woman, A Lover, A Friend by Jackie Wilson ...
fetching id for It Only Happened Yesterday by Jack Scott ...
fetching id for Brontosaurus Stomp by The Piltdown Men ...
fetching id for This Old Heart by James Brown And The Famous Flames ...
fetching id for Kommotion by Duane Eddy ...
fetching id for No by Dodie Stevens ...
fetching id for Alvin For President by David Seville And The Chipmunks ...
--> [error] The Lovin' Touch by Mark Dinning
fetching id for The Wreck Of The "John B" by Jimmie Rodgers ...
fetching id for Walking To New Orleans by Fats Domino ...
fetching id for Mule Skinner Blues by The Fendermen ...
fetching id for Lisa by Jeanne Black ...
--> [error] Alvin For President by David Seville And The Chipmunks
fetching id for Feel So Fine by Johnny Preston ...
fetching id for My Love by Nat King Cole-Stan Kenton ...
--> [error] Lisa by Jeanne Black
fetching id for Shortnin' Bread by Paul Chaplain and his Emeralds ...
fetching id for Red Sails In The Sunset by The Platters Featuring Tony Williams ...
fetching id for We Go Together by Jan & Dean ...
fetching id for Kookie Little Paradise by The Tree Swingers ...
--> [error] My Love by Nat King Cole-Stan Kenton
fetching id for This Bitter Earth by Dinah Washington ...
fetching id for Tell Laura I Love Her by Ray Peterson ...
fetching id for Look For A Star by Garry Miles ...
--> [error] Kookie Little Paradise by The Tree Swingers
fetching id for How High The Moon (Part 1) by Ella Fitzgerald ...
fetching id for Since I Met You Baby by Bobby Vee ...
fetching id for Many A Wonderful Moment by Rosemary Clooney ...
--> [error] Look For A Star by Garry Miles
fetching id for The Old Oaken Bucket by Tommy Sands ...
--> [error] Many A Wonderful Moment by Rosemary Clooney
fetching id for Nobody Knows You When You're Down And Out by Nina Simone ...
fetching id for A Teenager Feels It Too by Denny Reed ...
fetching id for Trouble In Paradise by The Crests ...
fetching id for Alley-Oop by Hollywood Argyles ...
fetching id for Everybody's Somebody's Fool by Connie Francis ...
fetching id for Question by Lloyd Price and His Orchestra ...
--> [error] The Old Oaken Bucket by Tommy Sands
fetching id for Don't Come Knockin' by Fats Domino ...
fetching id for Look For A Star by Billy Vaughn And His Orchestra ...
fetching id for That's All You Gotta Do by Brenda Lee ...
fetching id for One Of Us (Will Weep Tonight) by Patti Page ...
--> [error] Look For A Star by Billy Vaughn And His Orchestra
fetching id for Delia Gone by Pat Boone ...
--> [error] One Of Us (Will Weep Tonight) by Patti Page
fetching id for Is There Any Chance by Marty Robbins ...
fetching id for Candy Sweet by Pat Boone ...
--> [error] Delia Gone by Pat Boone
fetching id for Shortnin' Bread by The Bell Notes ...
fetching id for Beachcomber by Bobby Darin ...
fetching id for In The Still Of The Night by Dion & The Belmonts ...
fetching id for Where Are You by Frankie Avalon ...
fetching id for Look For A Star by Deane Hawley ...
fetching id for Look For A Star - Part I by Garry Mills ...
--> [error] Candy Sweet by Pat Boone
fetching id for One Boy by Joanie Sommers ...
fetching id for Little Bitty Pretty One by Frankie Lymon ...
fetching id for Josephine by Bill Black's Combo ...
fetching id for The Brigade Of Broken Hearts by Paul Evans ...
--> [error] Look For A Star - Part I by Garry Mills
fetching id for Please Help Me, I'm Falling by Rusty Draper ...
--> [error] Please Help Me, I'm Falling by Rusty Draper
fetching id for Because They're Young by Duane Eddy And The Rebels ...
--> [error] The Brigade Of Broken Hearts by Paul Evans
fetching id for Bongo Bongo Bongo by Preston Epps ...
fetching id for Revival by Johnny And The Hurricanes ...
fetching id for Far, Far Away by Don Gibson ...
fetching id for Vaquero (Cowboy) by The Fireballs ...
fetching id for When Will I Be Loved by The Everly Brothers ...
fetching id for Bad Man Blunder by The Kingston Trio ...
fetching id for Is A Blue Bird Blue by Conway Twitty ...
--> [error] Because They're Young by Duane Eddy And The Rebels
fetching id for Alley-Oop by Dante and the Evergreens ...
fetching id for I Shot Mr. Lee by The Bobbettes ...
fetching id for There's Something On Your Mind (Part 2) by Bobby Marchan ...
fetching id for Heartbreak (It's Hurtin' Me) by Little Willie John ...
fetching id for Heartbreak (It's Hurtin' Me) by Jon Thomas and Orchestra ...
--> [error] Is A Blue Bird Blue by Conway Twitty
fetching id for Mio Amore by The Flamingos ...
fetching id for I'm Gettin' Better by Jim Reeves ...
fetching id for I Know One by Jim Reeves ...
fetching id for Blue Velvet by The Statues ...
--> [error] Heartbreak (It's Hurtin' Me) by Jon Thomas and Orchestra
fetching id for Happy Shades Of Blue by Freddie Cannon ...
--> [error] Blue Velvet by The Statues
fetching id for I Really Don't Want To Know by Tommy Edwards ...
fetching id for Wake Me, Shake Me by The Coasters ...
fetching id for Hey Little One by Dorsey Burnette ...
fetching id for A Rockin' Good Way (To Mess Around And Fall In Love) by Dinah Washington & Brook Benton ...
fetching id for My Home Town by Paul Anka ...
fetching id for Sticks And Stones by Ray Charles and his Orchestra ...
--> [error] Happy Shades Of Blue by Freddie Cannon
fetching id for Runaround by The Fleetwoods ...
fetching id for My Tani by The Brothers Four ...
fetching id for Clap Your Hands by The Beau-Marks ...
fetching id for Night Train by The Viscounts ...
--> [error] Sticks And Stones by Ray Charles and his Orchestra
fetching id for Wonderful World by Sam Cooke ...
fetching id for Do You Mind? by Anthony Newley ...
fetching id for Do You Mind? by Andy Williams ...
fetching id for The Last Dance by The McGuire Sisters ...
--> [error] Night Train by The Viscounts
fetching id for Cathy's Clown by The Everly Brothers ...
fetching id for Burning Bridges by Jack Scott ...
fetching id for All I Could Do Was Cry by Etta James ...
fetching id for Love You So by Ron Holden with The Thunderbirds ...
--> [error] The Last Dance by The McGuire Sisters
fetching id for Swingin' Down The Lane by Jerry Wallace ...
fetching id for Cool Water by Jack Scott ...
fetching id for I've Been Loved Before by Shirley and Lee ...
fetching id for That's When I Cried by Jimmy Jones ...
fetching id for Be Bop A-Lula by The Everly Brothers ...
fetching id for She's Mine by Conway Twitty ...
fetching id for Jealous Of You (Tango Della Gelosia) by Connie Francis ...
fetching id for Paper Roses by Anita Bryant ...
--> [error] I've Been Loved Before by Shirley and Lee
fetching id for Pennies From Heaven by The Skyliners ...
fetching id for Mack The Knife by Ella Fitzgerald ...
fetching id for Happy-Go-Lucky-Me by Paul Evans ...
--> [error] Paper Roses by Anita Bryant
fetching id for Won't You Come Home Bill Bailey by Bobby Darin ...
fetching id for I Can't Help It by Adam Wade ...
fetching id for Too Young To Go Steady by Connie Stevens ...
fetching id for Lonely Weekends by Charlie Rich ...
fetching id for There's A Star Spangled Banner Waving #2 (The Ballad Of Francis Powers) by Red River Dave ...
--> [error] I Can't Help It by Adam Wade
fetching id for Theme From Adventures In Paradise by Jerry Byrd ...
--> [error] There's A Star Spangled Banner Waving #2 (The Ballad Of Francis Powers) by Red River Dave
fetching id for Something Happened by Paul Anka ...
--> [error] Theme From Adventures In Paradise by Jerry Byrd
fetching id for Good Timin' by Jimmy Jones ...
fetching id for Swingin' School by Bobby Rydell ...
--> [error] Something Happened by Paul Anka
fetching id for Johnny Freedom by Johnny Horton ...
fetching id for Second Honeymoon by Johnny Cash ...
fetching id for Train Of Love by Annette With The Afterbeats ...
fetching id for Down Yonder by Johnny And The Hurricanes ...
fetching id for Down The Street To 301 by Johnny Cash And The Tennessee Two ...
fetching id for Theme From "The Unforgiven" (The Need For Love) by Don Costa And His Orchestra And Chorus ...
--> [error] Down The Street To 301 by Johnny Cash And The Tennessee Two
fetching id for Whip It On Me by Jessie Hill ...
fetching id for Honky-Tonk Girl by Johnny Cash ...
fetching id for She's Just A Whole Lot Like You by Hank Thompson ...
fetching id for Young Emotions by Ricky Nelson ...
fetching id for Ding-A-Ling by Bobby Rydell ...
fetching id for Another Sleepless Night by Jimmy Clanton ...
fetching id for All The Love I've Got by Marv Johnson ...
fetching id for Banjo Boy by Jan And Kjeld ...
fetching id for Doggin' Around by Jackie Wilson ...
fetching id for I'll Be There by Bobby Darin ...
fetching id for Stuck On You by Elvis Presley With The Jordanaires ...
--> [error] Theme From "The Unforgiven" (The Need For Love) by Don Costa And His Orchestra And Chorus
fetching id for Lonely Winds by The Drifters ...
fetching id for Spring Rain by Pat Boone ...
fetching id for He'll Have To Stay by Jeanne Black ...
fetching id for Sixteen Reasons by Connie Stevens ...
fetching id for Night by Jackie Wilson ...
fetching id for Theme For Young Lovers by Percy Faith And His Orchestra ...
fetching id for Ooh Poo Pah Doo - Part II by Jessie Hill ...
fetching id for Walking The Floor Over You by Pat Boone ...
--> [error] Spring Rain by Pat Boone
fetching id for Jump Over by Freddy Cannon ...
fetching id for Dutchman's Gold by Walter Brennan With Billy Vaughn and his Orchestra ...
--> [error] Walking The Floor Over You by Pat Boone
fetching id for Ain't Gonna Be That Way by Marv Johnson ...
fetching id for Cherry Pie by Skip And Flip ...
fetching id for Greenfields by The Brothers Four ...
fetching id for Sink The Bismark by Johnny Horton ...
fetching id for Cradle Of Love by Johnny Preston ...
fetching id for National City by Joiner, Arkansas Junior High School Band ...
fetching id for Exclusively Yours by Carl Dobkins, Jr. ...
fetching id for Mountain Of Love by Harold Dorman ...
fetching id for You Were Born To Be Loved by Billy Bland ...
--> [error] Exclusively Yours by Carl Dobkins, Jr.
fetching id for Stairway To Heaven by Neil Sedaka ...
fetching id for Pink Chiffon by Mitchell Torok ...
fetching id for Let The Little Girl Dance by Billy Bland ...
fetching id for Comin' Down With Love by Mel Gadson ...
--> [error] You Were Born To Be Loved by Billy Bland
fetching id for Banjo Boy by Dorothy Collins ...
--> [error] Comin' Down With Love by Mel Gadson
fetching id for You've Got The Power by James Brown And The Famous Flames ...
--> [error] Banjo Boy by Dorothy Collins
fetching id for Found Love by Jimmy Reed ...
fetching id for Mr. Lucky by Henry Mancini ...
fetching id for Think by James Brown And The Famous Flames ...
fetching id for Mister Lonely by The Videls ...
fetching id for The Old Lamplighter by The Browns Featuring Jim Edward Brown ...
fetching id for When You Wish Upon A Star by Dion & The Belmonts ...
fetching id for Barbara by The Temptations ...
--> [error] You've Got The Power by James Brown And The Famous Flames
fetching id for Alley-Oop by The Dyna-Sores ...
fetching id for Ebb Tide by The Platters Featuring Tony Williams ...
fetching id for The Madison by Al Brown's Tunetoppers Featuring Cookie Brown ...
--> [error] Barbara by The Temptations
fetching id for La Montana (If She Should Come To You) by Frank DeVol And His Rainbow Strings ...
--> [error] The Madison by Al Brown's Tunetoppers Featuring Cookie Brown
fetching id for Nobody Loves Me Like You by The Flamingos ...
fetching id for Always It's You by The Everly Brothers ...
fetching id for Got A Girl by The Four Preps ...
fetching id for Tuxedo Junction by Frankie Avalon ...
fetching id for Banjo Boy by Art Mooney And His Orchestra ...
--> [error] La Montana (If She Should Come To You) by Frank DeVol And His Rainbow Strings
fetching id for Oh, Little One by Jack Scott ...
fetching id for Fame And Fortune by Elvis Presley With The Jordanaires ...
fetching id for The Way Of A Clown by Teddy Randazzo ...
fetching id for A Cottage For Sale by Little Willie John ...
fetching id for Shadows Of Love by LaVern Baker ...
fetching id for Mojo Workout (Dance) by Larry Bright ...
--> [error] Banjo Boy by Art Mooney And His Orchestra
fetching id for Biology by Danny Valentino ...
--> [error] Mojo Workout (Dance) by Larry Bright
fetching id for I'll Be Seeing You by The Five Satins ...
--> [error] Biology by Danny Valentino
fetching id for Right By My Side by Ricky Nelson ...
fetching id for Step By Step by The Crests ...
fetching id for White Silver Sands by Bill Black's Combo ...
fetching id for The Madison Time - Part I by Ray Bryant Combo ...
fetching id for The Ties That Bind by Brook Benton ...
fetching id for Just A Closer Walk With Thee by Jimmie Rodgers ...
fetching id for For Love by Lloyd Price and His Orchestra ...
fetching id for Tell Me That You Love Me by Fats Domino ...
fetching id for No If's - No And's by Lloyd Price and His Orchestra ...
--> [error] The Madison Time - Part I by Ray Bryant Combo
fetching id for What Am I Living For by Conway Twitty ...
fetching id for River, Stay 'Way From My Door by Frank Sinatra ...
fetching id for La Montana (If She Should Come To You) by Roger Williams ...
fetching id for The Yen Yet Song by Gary Cane And His Friends ...
--> [error] La Montana (If She Should Come To You) by Roger Williams
fetching id for I Love The Way You Love by Marv Johnson ...
fetching id for He'll Have To Go by Jim Reeves ...
fetching id for Footsteps by Steve Lawrence ...
fetching id for Apple Green by June Valli ...
--> [error] The Yen Yet Song by Gary Cane And His Friends
fetching id for Sweet Nothin's by Brenda Lee ...
fetching id for City Lights by Debbie Reynolds ...
fetching id for The Theme From "A Summer Place" by Percy Faith And His Orchestra ...
fetching id for Angela Jones by Johnny Ferguson ...
--> [error] Apple Green by June Valli
fetching id for Down The Aisle by Ike Clanton ...
fetching id for The Urge by Freddy Cannon ...
fetching id for Puppy Love by Paul Anka ...
fetching id for Earth Angel by Johnny Tillotson ...
fetching id for Money (That's what I want) by Barrett Strong ...
fetching id for Fannie Mae by Buster Brown ...
fetching id for Hither And Thither And Yon by Brook Benton ...
fetching id for You Don't Know Me by Lenny Welch ...
fetching id for A Star Is Born (A Love Has Died) by Mark Dinning ...
--> [error] Angela Jones by Johnny Ferguson
fetching id for Last Chance by Collay & the Satellites ...
--> [error] A Star Is Born (A Love Has Died) by Mark Dinning
fetching id for Beautiful Obsession by Sir Chauncey and his exciting strings ...
fetching id for Wheel Of Fortune by LaVern Baker ...
fetching id for Mama by Connie Francis ...
fetching id for Clementine by Bobby Darin ...
fetching id for Big Iron by Marty Robbins ...
fetching id for Don't Throw Away All Those Teardrops by Frankie Avalon ...
fetching id for Pledging My Love by Johnny Tillotson ...
fetching id for Wild One by Bobby Rydell ...
fetching id for Just One Time by Don Gibson ...
fetching id for Besame Mucho (Part I) by The Coasters ...
fetching id for Is It Wrong (For Loving You) by Webb Pierce ...
fetching id for Before I Grow Too Old by Fats Domino ...
fetching id for Easy Lovin' by Wade Flemons ...
fetching id for Jenny Lou by Sonny James ...
fetching id for Put Your Arms Around Me Honey by Ray Smith ...
fetching id for Baby What You Want Me To Do by Jimmy Reed ...
fetching id for Someone Loves You, Joe by The Singing Belles ...
--> [error] Beautiful Obsession by Sir Chauncey and his exciting strings
fetching id for Summer Set by Monty Kelly And His Orchestra ...
--> [error] Someone Loves You, Joe by The Singing Belles
fetching id for (Welcome) New Lovers by Pat Boone ...
fetching id for Teddy by Connie Francis ...
fetching id for O Dio Mio by Annette ...
fetching id for Teen-Ex by The Browns Featuring Jim Edward Brown ...
fetching id for Am I That Easy To Forget by Debbie Reynolds ...
fetching id for Beatnik Fly by Johnny And The Hurricanes ...
fetching id for Little Bitty Girl by Bobby Rydell ...
fetching id for (There Was A) Tall Oak Tree by Dorsey Burnette ...
fetching id for Starbright by Johnny Mathis ...
fetching id for Harbor Lights by The Platters ...
fetching id for Wake Me When It's Over by Andy Williams ...
--> [error] Summer Set by Monty Kelly And His Orchestra
fetching id for Think Me A Kiss by Clyde McPhatter ...
fetching id for Baby (You've Got What It Takes) by Dinah Washington & Brook Benton ...
fetching id for Two Thousand, Two Hundred, Twenty-Three Miles by Patti Page ...
--> [error] Wake Me When It's Over by Andy Williams
fetching id for It Could Happen To You by Dinah Washington ...
fetching id for El Matador by The Kingston Trio ...
fetching id for Ruby by Adam Wade ...
fetching id for Someday (You'll Want Me to Want You) by Della Reese ...
fetching id for This Magic Moment by The Drifters ...
fetching id for Caravan by Santo & Johnny ...
fetching id for Don't Deceive Me by Ruth Brown ...
fetching id for Rockin' Red Wing by Sammy Masters ...
fetching id for (Doin' The) Lovers Leap by Webb Pierce ...
--> [error] Two Thousand, Two Hundred, Twenty-Three Miles by Patti Page
fetching id for Shazam! by Duane Eddy His Twangy Guitar And The Rebels ...
--> [error] (Doin' The) Lovers Leap by Webb Pierce
fetching id for Handy Man by Jimmy Jones ...
fetching id for Down By The Riverside by Les Compagnons De La Chanson ...
--> [error] Shazam! by Duane Eddy His Twangy Guitar And The Rebels
fetching id for Forever by The Little Dippers ...
fetching id for Lady Luck by Lloyd Price and His Orchestra ...
--> [error] Down By The Riverside by Les Compagnons De La Chanson
fetching id for China Doll by The Ames Brothers ...
fetching id for What In The World's Come Over You by Jack Scott ...
fetching id for Paradise by Sammy Turner ...
fetching id for At My Front Door by Dee Clark ...
fetching id for Teenage Sonata by Sam Cooke ...
fetching id for My Empty Room by Little Anthony And The Imperials ...
fetching id for Lawdy Miss Clawdy by Gary Stites ...
--> [error] China Doll by The Ames Brothers
fetching id for Midnite Special by Paul Evans ...
--> [error] Lawdy Miss Clawdy by Gary Stites
fetching id for Beyond The Sea by Bobby Darin ...
fetching id for Chattanooga Choo Choo by Ernie Fields & Orch. ...
fetching id for Let It Be Me by The Everly Brothers ...
fetching id for Why Do I Love You So by Johnny Tillotson ...
fetching id for Delaware by Perry Como ...
fetching id for Teen Angel by Mark Dinning ...
fetching id for Running Bear by Johnny Preston ...
fetching id for How Deep Is The Ocean by Miss Toni Fisher ...
--> [error] Midnite Special by Paul Evans
fetching id for Rockin' Little Angel by Ray Smith ...
fetching id for About This Thing Called Love by Fabian ...
--> [error] How Deep Is The Ocean by Miss Toni Fisher
fetching id for String Along by Fabian ...
--> [error] About This Thing Called Love by Fabian
fetching id for Don't Fence Me In by Tommy Edwards ...
fetching id for Why I'm Walkin' by Stonewall Jackson ...
fetching id for Outside My Window by The Fleetwoods ...
fetching id for Adam And Eve by Paul Anka ...
fetching id for Where Or When by Dion & The Belmonts ...
fetching id for What Do You Want? by Bobby Vee ...
fetching id for Never Let Me Go by Lloyd Price and His Orchestra ...
--> [error] String Along by Fabian
fetching id for House Of Bamboo by Earl Grant ...
fetching id for Anyway The Wind Blows by Doris Day ...
fetching id for The Same Old Me by Guy Mitchell ...
fetching id for Chattanooga Shoe Shine Boy by Freddy Cannon ...
fetching id for Tracy's Theme by Spencer Ross ...
--> [error] Anyway The Wind Blows by Doris Day
fetching id for Country Boy by Fats Domino ...
fetching id for El Paso by Marty Robbins ...
fetching id for Down By The Station by The Four Preps ...
fetching id for Road Runner by Bo Diddley ...
fetching id for Lonely Blue Boy by Conway Twitty ...
fetching id for Bulldog by The Fireballs ...
fetching id for Too Much Tequila by The Champs ...
fetching id for Lucky Devil by Carl Dobkins, Jr. ...
--> [error] Tracy's Theme by Spencer Ross
fetching id for Crazy Arms by Bob Beckham ...
--> [error] Lucky Devil by Carl Dobkins, Jr.
fetching id for Just A Little Bit by Rosco Gordon ...
fetching id for Jambalaya by Bobby Comstock And The Counts ...
--> [error] Crazy Arms by Bob Beckham
fetching id for (Baby) Hully Gully by The Olympics ...
fetching id for You Got What It Takes by Marv Johnson ...
fetching id for Bad Boy by Marty Wilde ...
fetching id for I Need You Now by 100 Strings and Jono (Choir of 40 Voices) ...
--> [error] Jambalaya by Bobby Comstock And The Counts
fetching id for Eternally by Sarah Vaughan ...
fetching id for Alvin's Orchestra by David Seville And The Chipmunks ...
--> [error] I Need You Now by 100 Strings and Jono (Choir of 40 Voices)
fetching id for Shimmy, Shimmy, Ko-Ko-Bop by Little Anthony And The Imperials ...
fetching id for Pretty Blue Eyes by Steve Lawrence ...
fetching id for Straight A's In Love by Johnny Cash And The Tennessee Two ...
--> [error] Alvin's Orchestra by David Seville And The Chipmunks
fetching id for Too Pooped To Pop ("Casey") by Chuck Berry ...
fetching id for Words by Pat Boone ...
--> [error] Straight A's In Love by Johnny Cash And The Tennessee Two
fetching id for Let It Rock by Chuck Berry ...
fetching id for Go, Jimmy, Go by Jimmy Clanton ...
fetching id for Sleepy Lagoon by The Platters ...
fetching id for Time And The River by Nat King Cole ...
fetching id for On The Beach by Frank Chacksfield And His Orch. ...
fetching id for Clementine by Jan & Dean ...
--> [error] Words by Pat Boone
fetching id for Suddenly by Nickey DeMatteo ...
fetching id for T.L.C. Tender Love And Care by Jimmie Rodgers ...
fetching id for Whatcha' Gonna Do by Nat King Cole ...
fetching id for A Closer Walk by Pete Fountain ...
fetching id for Waltzing Matilda by Jimmie Rodgers ...
fetching id for Why by Frankie Avalon ...
fetching id for The Big Hurt by Miss Toni Fisher ...
fetching id for Darling Lorraine by The Knockouts ...
fetching id for The Village Of St. Bernadette by Andy Williams ...
fetching id for Werewolf by The Frantics ...
fetching id for Just Give Me A Ring by Clyde McPhatter ...
--> [error] Suddenly by Nickey DeMatteo
fetching id for You're My Baby by Sarah Vaughan ...
fetching id for Little Coco Palm by Jerry Wallace ...
fetching id for Teensville by Chet Atkins ...
--> [error] Just Give Me A Ring by Clyde McPhatter
fetching id for I Was Such A Fool (To Fall In Love With You) by The Flamingos ...
fetching id for Way Down Yonder In New Orleans by Freddie Cannon ...
--> [error] Teensville by Chet Atkins
fetching id for Sandy by Larry Hall ...
fetching id for Time After Time by Frankie Ford ...
fetching id for That Old Feeling by Kitty Kallen ...
fetching id for It's Time To Cry by Paul Anka ...
fetching id for Secret Of Love by Elton Anderson With Sid Lawrence Combo ...
fetching id for (Do The) Mashed Potatoes (Part 1) by Nat Kendrick And The Swans ...
fetching id for Up Town by Roy Orbison ...
--> [error] Way Down Yonder In New Orleans by Freddie Cannon
fetching id for Among My Souvenirs by Connie Francis ...
fetching id for The Old Payola Roll Blues (Side I) by Stan Freberg ...
fetching id for Peace Of Mind by Teresa Brewer ...
fetching id for Bonnie Came Back by Duane Eddy His Twangy Guitar And The Rebels ...
--> [error] Up Town by Roy Orbison
fetching id for First Name Initial by Annette With The Afterbeats ...
--> [error] Bonnie Came Back by Duane Eddy His Twangy Guitar And The Rebels
fetching id for Amapola by Jacky Noguez And His Orchestra ...
--> [error] First Name Initial by Annette With The Afterbeats
fetching id for Tell Her For Me by Adam Wade ...
fetching id for If I Had A Girl by Rod Lauren ...
fetching id for Little Things Mean A Lot by Joni James ...
fetching id for Mumblin' Mosie by The Johnny Otis Show ...
--> [error] Amapola by Jacky Noguez And His Orchestra
fetching id for The Happy Muleteer by Ivo Robic ...
fetching id for Honey Hush by Joe Turner ...
fetching id for Not One Minute More by Della Reese ...
fetching id for Hound Dog Man by Fabian ...
fetching id for No Love Have I by Webb Pierce ...
--> [error] Mumblin' Mosie by The Johnny Otis Show
fetching id for What's Happening by Wade Flemons ...
--> [error] No Love Have I by Webb Pierce
fetching id for Let Them Talk by Little Willie John ...
fetching id for Baciare Baciare (Kissing Kissing) by Dorothy Collins ...
fetching id for Heartaches By The Number by Guy Mitchell ...
fetching id for Smokie - Part 2 by Bill Black's Combo ...
fetching id for How About That by Dee Clark ...
fetching id for Mack The Knife by Bobby Darin ...
fetching id for I Know What God Is by Perry Como ...
fetching id for Forever by Billy Walker ...
fetching id for Skokiaan (South African Song) by Bill Haley And His Comets ...
fetching id for Honey Love by Narvel Felts ...
fetching id for I Can't Say Goodbye by The Fireflies Featuring Ritchie Adams ...
fetching id for Cry Me A River by Janice Harper ...
--> [error] Smokie - Part 2 by Bill Black's Combo
fetching id for This Friendly World by Fabian ...
fetching id for Don't Let The Sun Catch You Cryin' by Ray Charles ...
fetching id for If You Need Me by Fats Domino ...
fetching id for Just Come Home by Hugo & Luigi ...
--> [error] Cry Me A River by Janice Harper
fetching id for Run Red Run by The Coasters ...
fetching id for Mary Don't You Weep by Stonewall Jackson ...
fetching id for A Year Ago Tonight by The Crests ...
fetching id for One Mint Julep by Chet Atkins ...
--> [error] Just Come Home by Hugo & Luigi
fetching id for I Don't Know What It Is by The Bluenotes ...
--> [error] One Mint Julep by Chet Atkins
fetching id for Tear Drop by Santo & Johnny ...
fetching id for I Wanna Be Loved by Ricky Nelson ...
fetching id for Let's Try Again by Clyde McPhatter ...
fetching id for Oh! Carol by Neil Sedaka ...
fetching id for I'll Take Care Of You by Bobby Bland ...
fetching id for Talk That Talk by Jackie Wilson ...
fetching id for What About Us by The Coasters ...
fetching id for The Whiffenpoof Song by Bob Crewe ...
--> [error] I Don't Know What It Is by The Bluenotes
fetching id for My Little Marine by Jamie Horton ...
--> [error] The Whiffenpoof Song by Bob Crewe
fetching id for We Got Love by Bobby Rydell ...
fetching id for Scarlet Ribbons (For Her Hair) by The Browns ...
fetching id for So Many Ways by Brook Benton ...
fetching id for I Forgot More Than You'll Ever Know by Sonny James ...
fetching id for Believe Me by Royal Teens ...
--> [error] My Little Marine by Jamie Horton
fetching id for Since I Made You Cry by The Rivieras ...
fetching id for Mediterranean Moon by The Rays ...
--> [error] Believe Me by Royal Teens
fetching id for Mighty Good by Ricky Nelson ...
fetching id for In The Mood by Ernie Fields & Orch. ...
fetching id for Come Into My Heart by Lloyd Price and His Orchestra ...
--> [error] Mediterranean Moon by The Rays
fetching id for God Bless America by Connie Francis ...
fetching id for Uh! Oh! Part 2 by The Nutty Squirrels ...
fetching id for Danny Boy by Conway Twitty ...
fetching id for Be My Guest by Fats Domino ...
fetching id for Let The Good Times Roll by Ray Charles ...
fetching id for Misty by Johnny Mathis ...
fetching id for Livin' Dangerously by The McGuire Sisters ...
--> [error] Uh! Oh! Part 2 by The Nutty Squirrels
fetching id for Reveille Rock by Johnny And The Hurricanes ...
fetching id for Swingin' On A Rainbow by Frankie Avalon ...
fetching id for This Time Of The Year by Brook Benton ...
fetching id for Marina by Rocco Granata and the International Quintet ...
--> [error] Livin' Dangerously by The McGuire Sisters
fetching id for Dance With Me by The Drifters ...
fetching id for Climb Ev'ry Mountain by Tony Bennett ...
fetching id for Always by Sammy Turner ...
fetching id for Marina by Willy Alberti ...
fetching id for Riverboat by Faron Young ...
fetching id for Don't You Know by Della Reese ...
fetching id for Teenage Hayride by Tender Slim ...
fetching id for Mr. Blue by The Fleetwoods ...
fetching id for The Clouds by The Spacemen ...
fetching id for (New In) The Ways Of Love by Tommy Edwards ...
fetching id for (If You Cry) True Love, True Love by The Drifters ...
fetching id for Wont'cha Come Home by Lloyd Price and His Orchestra ...
--> [error] Teenage Hayride by Tender Slim
fetching id for Promise Me A Rose (A Slight Detail) by Anita Bryant ...
fetching id for (Seven Little Girls) Sitting In The Back Seat by Paul Evans and the Curls ...
--> [error] Promise Me A Rose (A Slight Detail) by Anita Bryant
fetching id for Uh! Oh! Part 1 by The Nutty Squirrels ...
fetching id for I'm Movin' On by Ray Charles and his Orchestra ...
--> [error] Uh! Oh! Part 1 by The Nutty Squirrels
fetching id for The Sound Of Music by Patti Page ...
fetching id for High School U.S.A. by Tommy Facenda ...
fetching id for The Happy Reindeer by Dancer, Prancer And Nervous ...
--> [error] High School U.S.A. by Tommy Facenda
fetching id for Smokie-Part 2 by Bill Doggett ...
fetching id for Just As Much As Ever by Bob Beckham ...
--> [error] The Happy Reindeer by Dancer, Prancer And Nervous
fetching id for Primrose Lane by Jerry Wallace With The Jewels ...
--> [error] Just As Much As Ever by Bob Beckham
fetching id for Love Potion No. 9 by The Clovers ...
fetching id for The Little Drummer Boy by Johnny Cash ...
fetching id for Do-Re-Mi by Mitch Miller ...
fetching id for Deck Of Cards by Wink Martindale ...
fetching id for Happy Anniversary by Jane Morgan ...
fetching id for One More Chance by Rod Bernard ...
Esecuzione completata in 15323.7393 secondi
# create a backup copy of the billboard dataset
df_billboard_bak = df_billboard.copy()_____no_output_____# insert the fetched ids as a new column of df_billboard
ids = np.array(output)[:,0]
df_billboard.insert(0, 'id', ids)_____no_output_____# compute the percentage of songs for which an id was found
found_id = df_billboard.id.count()
x = (found_id / df_billboard.title.count()) * 100
print("Found ids = %d%%" % x)Found ids = 82%
# export to google drive
from google.colab import drive
# mounts the google drive to Colab Notebook
drive.mount('/content/drive',force_remount=True)
df_billboard.to_csv('/content/drive/My Drive/Colab Notebooks/datasets/billboard+ids_3.csv')Mounted at /content/drive
</code>
## Retrieving audio features for the Billboard dataset_____no_output_____
<code>
# re-import the billboard dataset (with ids) + the main dataset
"""
drive.CreateFile({'id':'1fZzuYu-HXKP9HUeio-FL9P4eNygOQ0qq'}).GetContentFile('billboard+ids_0.csv')
df_billboard = pd.read_csv("billboard+ids_0.csv").drop('Unnamed: 0',axis=1)
drive.CreateFile({'id':'1eOqgPk_izGXKIT5y6KfqPkmKWqBonVc0'}).GetContentFile('dataset2_X_billboard.csv')
df_songs = pd.read_csv("dataset2_X_billboard.csv").drop('Unnamed: 0',axis=1)
"""_____no_output_____# elimino valori nulli (= id non trovati)
df_billboard = df_billboard.dropna()
# build a list of the ids in the billboard dataset
ids = list(df_billboard.id.array)_____no_output_____# build a list of the ids that are not already in the main dataset
time_0 = time.perf_counter()
ids_new = [id for id in ids if id not in list(df_songs.id.array)]
print_exec_time(time_0)Esecuzione completata in 1374.0148 secondi
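# (alternative sketch, not in the original notebook) the list comprehension
# above re-scans list(df_songs.id.array) for every id, which is roughly
# O(len(ids) * len(df_songs)) and explains the ~23 minute runtime; building a
# set first makes each membership test O(1) and yields the same ids_new
known_ids = set(df_songs.id.array)
ids_new = [id for id in ids if id not in known_ids]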
time_0 = time.perf_counter()
with concurrent.futures.ProcessPoolExecutor() as executor:
    results = executor.map(get_features, ids_new)
    output = []
    for result in results:
        output.append(result)
print_exec_time(time_0)fetching features for id: 3PyQV3cDjV5tEJGpYYH2K1
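# (alternative sketch, not in the original notebook) get_features spends its
# time waiting on HTTP responses rather than on CPU, so a ThreadPoolExecutor is
# usually a better fit than ProcessPoolExecutor here: threads share the spotipy
# client without pickling the work items; max_workers=8 is an arbitrary choice
import concurrent.futures

with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
    output = list(executor.map(get_features, ids_new))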
[... feature-fetch log truncated: one "fetching features for id: <spotify id>" line per id in ids_new ...]
fetching features for id: 2QZ3TaDZsHEFPYBxBWel9X
fetching features for id: 6QeBPf8oEpIU55p1LqiUJv
fetching features for id: 21yKY6JxjiR83hwhwe2VgR
fetching features for id: 24wd3JuMe658XEGV2viGrc
fetching features for id: 5FghKDW0T6ARG4U3Ef4x7N
fetching features for id: 4aaf6Me2nRCDWRajaYJdNo
fetching features for id: 4w6KzdRpUwyKfEf4vFLeiI
fetching features for id: 5ObPTCyOhEk9g5XUepPxGx
fetching features for id: 3MedejWo5Q7hYLR6epEDTp
fetching features for id: 4wowCtWxnXTLJw093mh1fl
fetching features for id: 3566Hq03kwDoZipjf1ROoy
fetching features for id: 4m4vlae7fycsqeVX7W4qkX
fetching features for id: 4dp6dBDTRVQS7UKRuZFdOl
fetching features for id: 6RXoA806Vy7clqPkcKiIjx
fetching features for id: 0ydH7amZ8mOIVE2GIfOYx1
fetching features for id: 6cahHUfSQDIB8i0Yx3srwx
fetching features for id: 0gl74IvGW0tzX1Cw15Thic
fetching features for id: 6pT33ADvrAnXzNUFvlrRdQ
fetching features for id: 0xTkZjpIAGYvK1gvUmvIMT
fetching features for id: 5HbRi1fwxhZArT9IubMgr8
fetching features for id: 4wpOxZQmoxrHtfQQcR4PyW
fetching features for id: 65s2XkRfOUdAadGgA9bjGS
fetching features for id: 4mSfwqx71pxE2PeYc7bvr7
fetching features for id: 6JSrZlaGV7Xle7KJhGikRq
fetching features for id: 7G4ggNSeJG6P82Yk0fvvdR
fetching features for id: 19eolVl0e9EDVl4cTRSZti
fetching features for id: 6Tl4lk8QBmrZ4xD5TbHEiW
fetching features for id: 15mNqJkj7JNSzgYuae5pPZ
fetching features for id: 1CfeKBehU6z7lZ9zO0kf5g
fetching features for id: 3jwPIp1n8YrKcEpKB4PJoT
fetching features for id: 1oMs2rmpTZW8p9f9zL2clh
fetching features for id: 6gk2ARZmiYtXBnqYmh7H4O
fetching features for id: 0EDDzuxDc8oBkXV4X372Az
fetching features for id: 4Pu5jsCCU3CbOYGAInjLpS
fetching features for id: 5ouMGeOtmeHUfneTwgjNWC
fetching features for id: 1Vp8fFUPiBLc6LuSemiahz
fetching features for id: 0W7QzopfYsOv7YpScrDwAY
fetching features for id: 6E8XOuqYdDNtxIlxowWeg6
fetching features for id: 5aYnq06qFFSHlUDISQafgw
fetching features for id: 3qf40dVHsTi3XWmThoCMom
fetching features for id: 4cix9zymmhisLuM56RDcB7
fetching features for id: 3A5N7BB4q7pARAey4si83V
fetching features for id: 0GTT7x9HY0WYP0E0TkFwRx
fetching features for id: 7o3mquXtovl4Lg2UwmuF9w
fetching features for id: 2wY8TkRMnTNSVhSDJnFHTg
fetching features for id: 7eotSbuPkqE4DkvsSZLUM6
fetching features for id: 3u5NVpdpX1yj6QWkjeZiqc
fetching features for id: 2Sp9MHp6VRos9kwvUWpUlp
fetching features for id: 5dKfQoMk0Y5GKWikhiHQiM
fetching features for id: 5Sz09kaSzvpTC8lgm5W8Mt
fetching features for id: 6AwCMQu47EyMU2IbzhkjlL
fetching features for id: 30bxwrpUzdHZYMslOnmA2n
fetching features for id: 2X1EonkN2OiF2tKyNEuzHY
fetching features for id: 2gofnl02nfeNFuXZdYroRC
fetching features for id: 3AuPRF3WVgtCNvvspIWc0Z
fetching features for id: 3rQzv2lYriRlWITedMsECq
fetching features for id: 749S6pIVPtnCCq6mzufmUd
fetching features for id: 3JBFfG915JxanyJSEDP51k
fetching features for id: 19wFpRBbJhCqXJ13V6Yw7n
fetching features for id: 3gD8PjbZRQGyoZ5KjXCM2N
fetching features for id: 3Lk0Fy1xGEQ8xOQWukyzRP
fetching features for id: 1nneC7m6oPIo1JuCXPZyHa
fetching features for id: 62IemfSmPDN0S5tSxhNab0
fetching features for id: 6VOZrmZE0URu7E5LHaVT5W
fetching features for id: 4eQ7kb8hyNIYEPuPNZ9RoN
fetching features for id: 77L13Hpqe4bBcWL1BHJxSr
fetching features for id: 4ytbzwn1SkPxYzz21n7Zoa
fetching features for id: 58wullwJKOgd2ePaso2WlT
fetching features for id: 3RB1zMyOLc5MvwUJeLN6Pv
fetching features for id: 213sXSifN2KluzqglSDQTm
fetching features for id: 2N1T9jGtCOcyE34fAdgPQC
fetching features for id: 1aeDg6eS1NeGNu42l9qYn4
fetching features for id: 6H8WMHCov3QGaPLbpOMpcJ
fetching features for id: 3Htxny2KBYt7JaSnQPFSnQ
fetching features for id: 4t3lklr6YGeCK4MXfGkKBl
fetching features for id: 4aaQpHCvL2jkF0tJzXxvgB
fetching features for id: 1hTRmVmVaHSUHN0dxph6FI
fetching features for id: 75aFifgT6BZY7h7hkEPWVU
fetching features for id: 0YGCSXIPs8VTUZmk8nDpdu
fetching features for id: 58smfvnOg9Wn7koHxsvJ8V
fetching features for id: 2N6pHH41hTjwXpOwnsTLCG
fetching features for id: 46rJEodxZ0CezzTS5Uke32
fetching features for id: 70DZRMZUbE4vLcNnwEpbNb
fetching features for id: 65t1qUTFzrl94IxuDq7oQa
fetching features for id: 7nYBMEtMVLMAJQMpaQHULU
fetching features for id: 2Du61OdJ7CXmUBG7FaZST6
fetching features for id: 120BvxQsMS1M41PdBClcxa
fetching features for id: 3HfXCqcFDH5Oz4O89O9Mtu
fetching features for id: 3koPNalAOcO8orv2MBvxen
fetching features for id: 4PEeZ2U4UfP2Jo8EtIOjus
fetching features for id: 7K6KRwyvE0plDEzxrVmCon
fetching features for id: 0dmQv5F4dm9nMxX8zz2x34
fetching features for id: 3FRCcvlS0KFKqKI17bDZRK
fetching features for id: 2CHBo5ozruzhxgaRHdyupI
fetching features for id: 6zGufyo5txRnBPsGm51Zc2
fetching features for id: 1LziKsFJM2A0Uu68gtKWGH
fetching features for id: 7x29Vj48r9cPuQCEC2VUia
fetching features for id: 2RJPho1Ex5yhAm4tr76DLp
fetching features for id: 4zcfNc9jBdX9BPqXzqPSZi
fetching features for id: 4rwqam3s5xzOqOJNyqwCZU
fetching features for id: 7lhPwKa37sjV9eqWouT9B4
fetching features for id: 6W9uexj1gEJDab6bcWV7cA
fetching features for id: 1DOQ4CSjYKvwQbFK0lGbj5
fetching features for id: 0UsAYccEydT3AoDZi6wXwG
fetching features for id: 58fnw6HtW7CwDXVrvXu3TB
fetching features for id: 1MeMuueW7NPiy90BweHDhJ
fetching features for id: 4SGAlTbZ5Y6GeNuRGWW6KV
fetching features for id: 6aIt7na6SlEAO6lk3BMhK4
fetching features for id: 1VOa4gAfFLXF18Fda5SnwJ
fetching features for id: 5q5oUwH8wxSbtztFT8Mwv0
fetching features for id: 2HcT0IGf27ov1w9ncWtij6
fetching features for id: 0HkFBtJT19oKGvK2nhs88Q
fetching features for id: 7mGlt3k3luo8Qt69BxxZp7
fetching features for id: 31vgSRfK5YOGSCAfj6zPAS
fetching features for id: 7LWUt6WCJsTUBjVl2GhpOK
fetching features for id: 5I04kxoRbmyz6WfSjE0jWT
fetching features for id: 177tdHqrRwTCUhBwTpqQrn
fetching features for id: 1L34KjA1nP5BA7PLCaRqUg
fetching features for id: 12hAeEyERydkkb1Wow1xdA
fetching features for id: 6cKPKg0QjeY7K3F6EHgdg2
fetching features for id: 5trFoWc3DxknotgAxtYkFl
fetching features for id: 3pBtSlaM0qyH8mLXzVeUhh
fetching features for id: 02PSTB9Cu6AQccSrMZKuKo
fetching features for id: 2Jymfq9aYchbP9mSTp646l
fetching features for id: 24tCr5aP2Mjbd2C0tZb3XL
fetching features for id: 0dmQv5F4dm9nMxX8zz2x34
fetching features for id: 5Yt5e7my4DliCa64DK2Ovd
fetching features for id: 7CRVGxQiVGc7zXxUvwJCVD
fetching features for id: 2oWu4i4rYXIs37J1X3epmn
fetching features for id: 6iWLaVM5A60iR0z9nEcG6A
fetching features for id: 2JfAqeGWP3cEDOLosccf7R
fetching features for id: 1OppEieGNdItZbE14gLBEv
fetching features for id: 0R6jYwBiYBFOcBMqD9fZnA
fetching features for id: 4UolfBGFer7I2ldSh4GMZi
fetching features for id: 3gwu6u6Zc5V9Y7TNqtbhLd
fetching features for id: 14SxSIUPNX0uWokgXwTb4O
fetching features for id: 03vxPxwlOOa50NAbb2zGmK
fetching features for id: 0WxPGuKaTQe20nm8UAAohJ
fetching features for id: 7ki09wB8QGbf6TGrHl8nHl
fetching features for id: 5Tf6XOWk492HEk6Wk7DeAk
fetching features for id: 7rh95hfK1OEgDL6gD3nqVw
fetching features for id: 7zySrJDb2H7fn2kF3t9odr
fetching features for id: 5ldK6ZO1TCLm3hfB2WApxV
fetching features for id: 7MFMZ2K2nJa6ztUcR8LznM
fetching features for id: 2GHg4ukhI8ubzHe24YL8mR
fetching features for id: 1Qf3si9AhOhAgYkF1md0FB
fetching features for id: 6SSBWjGwdBmwhmtOYVL0Pn
fetching features for id: 4lgkyAvwudA9fcjmOKM706
fetching features for id: 4RGxUjaCmVtD30pIhtEi7w
fetching features for id: 6j2ydq5grFnavpTXTiCYed
fetching features for id: 1MwH62ULXoq0m4CShyUVje
fetching features for id: 3L2BfLNQnVYY8J9Y0quaUA
fetching features for id: 6rXtc7TXUn0d2V9xPlJnTb
fetching features for id: 6IcUdlgT1l2XwSYGFkx5YY
fetching features for id: 20Rx0z4HQqQuUoN8Ho9eX2
fetching features for id: 0QWHVRBV4XYd8D2X1jxgDH
fetching features for id: 3UppV186Y0plS3C68dvHYa
fetching features for id: 1OPINklujDatP2f2s25Pyh
fetching features for id: 4qK7JRcMpizOH7YwWNxlmF
fetching features for id: 5GR1Jj5ahZtoR6WqyM5LP4
fetching features for id: 4gySZmwNJcFvVUR0SaELUK
fetching features for id: 5YTfDljsmdlwuQiUZXpNnf
fetching features for id: 6iGU74CwXuT4XVepjc9Emf
fetching features for id: 6RDuH8C4yjdxSsjOEn65IO
fetching features for id: 31VnXPM48QGMyUmVnVtCAj
fetching features for id: 2I5l8CQz7qdJGWzIiQAg1j
fetching features for id: 13t4VV3Y9PBk0OnAAw03WT
fetching features for id: 5XgUHbrBYYsbUFXn6Zsywv
fetching features for id: 0ncVdfMgdTdSirbhICZdYE
fetching features for id: 0RlrRftZDNtuHYX7VGWr1Z
fetching features for id: 6E9zaZ0Tnm9Np5XkfinOQq
fetching features for id: 7AzFID6u1b3zIWbd9pb8Dk
fetching features for id: 5RQRPai5wx67G2P1kbBdwv
fetching features for id: 1fjORL6GPyVVwc9rjdfgm8
fetching features for id: 5MzQw6nOYKXylsoA8vtI60
fetching features for id: 2b9e6tO9HYGTg0WaHw3X3p
fetching features for id: 264kU95bSEp3GYcUUPUcpn
fetching features for id: 5DJ4EhPZ3i6sQ7fAvAk9Bi
fetching features for id: 5AHtv2v9nvvvyPugUytAxI
fetching features for id: 3HdwtikIeE1NfXwfgUkCsP
fetching features for id: 4QA2dj0lI0us0GFYa4YWa4
fetching features for id: 2p1ixDAqMAdZaa40a2PdmW
fetching features for id: 28XwCRRmoFTPHpryWVyZaj
fetching features for id: 6YH8kXEqVrUXzQ6NiM4yze
fetching features for id: 2I07mm97gsZtLdF058b3OX
fetching features for id: 7nHpusEs6QsRM2dDQVsuDo
fetching features for id: 4qH8fQ5Ub2r6llHnrRGM3w
fetching features for id: 4BDzapvxHkr8AKU9G45ERV
fetching features for id: 6XssRDT7fq7e08bZbMxPoL
fetching features for id: 1A41xE0cj2n6ryb124AQeY
fetching features for id: 4ZxUICvETa5hE87tSLJYQ3
fetching features for id: 4LiEdOeFleIJZXy0mVwuOq
fetching features for id: 0neniOq88zX3RLJmYffzjc
fetching features for id: 7tibCehUdKMeJABU2GHmM8
fetching features for id: 1f6JxG6MNzILprLk4PNdi8
fetching features for id: 5WeHEbyAGCQ7fB3IF2FeD8
fetching features for id: 1C1Ou0rEJwjpZZgdaQjf0y
fetching features for id: 4B19PdOVRx9jPrf3YZSx4l
fetching features for id: 0CXWq42BW38Kwx1IjTnM9Q
fetching features for id: 0u5J4IdTy7HSloMZSdsClZ
fetching features for id: 6K0LVSEiy6aEfnefHWVmnk
fetching features for id: 1DTydXUp7Y4InWSM38rLuI
fetching features for id: 0jvb9XyXRNHQItRBjDdj4O
fetching features for id: 3DBdTT9nwUOw4ENzumkyWi
fetching features for id: 0wvcESUuD3yKzs0PAVpMiv
fetching features for id: 1tTCibOHzIeuwgTe77I2TN
fetching features for id: 25nU5mxSzlzyOXzeqx4c5j
fetching features for id: 5AaOHeUUUbapFNn4wHrjWA
fetching features for id: 0UFIypTuAtPrmBVwjdA6VA
fetching features for id: 36bdQ20bomVkGt6FGD96lu
fetching features for id: 2sTzesoyRqdQliEdc3WSoT
fetching features for id: 77nSOTGJoNEfRMtfRMybI6
fetching features for id: 2FGnxdx61AQcOupUx7Sk5p
fetching features for id: 4V5RFkTNDTGjkHEkg31aQQ
fetching features for id: 4Tn0B9HGfhYT9rq6uKdDjp
fetching features for id: 7qykdbkxp5BN1EBpRdOLfs
fetching features for id: 39HkG70nWuTZk9ymknt7oX
fetching features for id: 7inxB3CvYZyZb4i8KODD2s
fetching features for id: 7juGsvjVcJUfPxJ98lQZDW
fetching features for id: 53YvEcjYsIfGUWW8ZpKh8Z
fetching features for id: 7BDkpsS8gPjNmJWcrIVXot
fetching features for id: 7KP3jPXSIjB2xx6UifWQ2V
fetching features for id: 4rz596X40rwNfOhwTs7dgI
fetching features for id: 2UDvFt9DihkjhaNY5Ak50d
fetching features for id: 3UxZw2CfZ3dmNIii4CxMtG
fetching features for id: 20XHrBOCXsubJH6Bm5Mfxp
fetching features for id: 2VFlsgiY8uI7mN3nzXBBtl
fetching features for id: 3rGQZbNH41FZEwkxfAY0nf
fetching features for id: 7Djn4EfJGHXgJ8BLdbHbAh
fetching features for id: 3UYALm8BNblDexrC4pDAb1
fetching features for id: 6ozkTvpSzrRK9vJCMksza3
fetching features for id: 0mABWoxXTuxGvqRY1Wfwzj
fetching features for id: 00YhuN9oOmXUyLQiHjXPxt
fetching features for id: 3qhakj4od2kNX9dMGyRVWV
fetching features for id: 3FuF9ae9kkhpV0M9Za17aU
fetching features for id: 3fMt2d233szyKPYL7Znact
fetching features for id: 7CzsQaxOCUXNLrxQ3uTENN
fetching features for id: 5RxxO4kwxcT9VjrlPzhJXJ
fetching features for id: 3FvcomFA03nobW9h2lKo7a
fetching features for id: 79bkwA2bUvuxYxAYgKY7H9
fetching features for id: 3VgSSANMXHPNgkWTJDJYAK
fetching features for id: 1jfcjRGW6y4qnlOGchGqyn
fetching features for id: 63T7DJ1AFDD6Bn8VzG6JE8
fetching features for id: 5dAlp0tZLiRoUZnPVhT099
fetching features for id: 6vc4xOtDRq6kDB2iKsfbbo
fetching features for id: 7c8fTb0e6BocxwiyRq8Gz6
fetching features for id: 3CPNbSbr5N0p32ACDKq30h
fetching features for id: 2Q4rbHZ1qltrsAbX7nYmlW
fetching features for id: 3e3gnYm9b4hoRaLwEQPQJd
fetching features for id: 0PrXe1sVQoYMeP70ZjEyP1
fetching features for id: 6oP8Ps4asdwDGhQ59t6UFA
fetching features for id: 3jwjRiAGKizbD4Ma00UBUe
fetching features for id: 3wHRoSk4VkMgB7kxY1lgzt
fetching features for id: 06YcuDoEZvMcxgzvtzJTz4
fetching features for id: 0ExUux7mwrd8uMlgNL5lhY
fetching features for id: 774FgCxWI5rdmLbjJn2lfj
fetching features for id: 5Ugpl0C3Syh0ndKivv6Yr5
fetching features for id: 23DITgKYutJaurSN3EAZ2Z
fetching features for id: 4naJ8j7Lm2XeH60VBX6Q7V
fetching features for id: 2xOEmK42jfYwj0biDzNlW2
fetching features for id: 5GLr9bkV2czl8o7zJoWXca
fetching features for id: 6fuqcFBVbRJx7F2D849fXh
fetching features for id: 7GH6f81ZIjRF7MDBsqTsEV
fetching features for id: 6DfQ5isWcsabpShWiMhBpP
fetching features for id: 6ZarwCcXk7Kn9ZoE3CJWF1
fetching features for id: 4QWIK0sLhlT2J7DOuKdijP
fetching features for id: 26GpmXvGfnsvBkLu0ijA5v
fetching features for id: 1nmnm9oqkHn4jHMU7ZPVF1
fetching features for id: 7jpbe27ryBnqLSIoGZ2CZC
fetching features for id: 2xUPazvz0vm9MwHArnzFea
fetching features for id: 2PFqgzG65dvpdsdIkMzPkA
fetching features for id: 2bTx3OevanlUx7beo8YJsN
fetching features for id: 7aQ1gzWP5UXI56c09Y90v5
fetching features for id: 07GpPzN1AAk1mLBLpnK12b
fetching features for id: 1IDnb9YirBHT4OpQTanvWv
fetching features for id: 36tHtmi6ReXOPM3OJ0HUNb
fetching features for id: 4URcCVFgWv9cWZxgjhveFj
fetching features for id: 7xpfik1yecW50767Bfx8lP
fetching features for id: 0nc6Xu6bjDZz8UTyU5WHBS
fetching features for id: 3u7n1FGFhlUEZrjCmwwepe
fetching features for id: 3g9731HiHbDvSwxVf7R2CZ
fetching features for id: 6cGWnETSKdUw8JU8MxKv5W
fetching features for id: 5O0ibxlu6WLiDr7Q2n8EuI
fetching features for id: 3EQM6RIkUZ3nVI8mTVwIvY
fetching features for id: 49CnPV5v4T9AX2rubSK5S1
fetching features for id: 56aCFmwTdm1me0suarfiWn
fetching features for id: 19jo0UT2vqD4pNVfIqTy4R
fetching features for id: 7EiWrR27y0mfzEJrsoSwQU
fetching features for id: 59HTJznkQkbBnfpe9KhIRJ
fetching features for id: 0RZbhSQw2Z7NeOgzi5Gt37
fetching features for id: 1B0YNcxRYQ4vJIcFFJRCt8
fetching features for id: 0kYqTzbLCdlEdAL3snnZHM
fetching features for id: 3H10ZvoLhe5sHpTu3slqlr
fetching features for id: 4fSQBnovCFWkLhMbWkmsWc
fetching features for id: 6Y93DEf2OkUgksFCGHr8Zx
fetching features for id: 44okWRzv99N7MzyYiC4SRU
fetching features for id: 1FRVMsTQHwcDhZBBDKfhyT
fetching features for id: 2GEW7EFcnqHqugI6KOSPm8
fetching features for id: 5Qm5fwmi7kq1bIVYFcjDap
fetching features for id: 3OmHk1OUKWqtZbcRhupyis
fetching features for id: 5RODyaBQJ1CpksgR7abWb6
fetching features for id: 0qlmIziH262eMI6L43l5lX
fetching features for id: 13BBUy0iYfRwruBRbZqKME
fetching features for id: 7GEL5dZbLQhJGrDtfPeHHz
fetching features for id: 7ES3QjxzmFKMJWt90giLsv
fetching features for id: 2DS20dL4AkzYS9yPt4hnxv
fetching features for id: 46CuB9nNPwY6RyvAoVRtjD
fetching features for id: 45uZPAPHtLjiDyD2fYpp5G
fetching features for id: 1157yaYcDmuqBYm9hSqbwF
fetching features for id: 78T592KXsizVSvYdTldh8r
fetching features for id: 0ruUoErhMN7ySRw9BAkepv
fetching features for id: 0BhbbbNTK2W7voVHSbR1In
fetching features for id: 3rtLq4R8PM0HlPXZ8CU8qY
fetching features for id: 17QOLAql5iYwYquHZS0eon
fetching features for id: 5vFWE2UY9rTZ5sOs0MbPhu
fetching features for id: 2xgqgPLi3K47WHjECz88IW
fetching features for id: 56OqFabpmkMegY248eoSm5
fetching features for id: 4zZ1AsMhireArLtGjmANGp
fetching features for id: 6XB9L1nzsoHvB2igECVjGe
fetching features for id: 5pRgsodSwssdCAEXnY1cg3
fetching features for id: 1k9rx6o8vBnaWTBoYsjM68
fetching features for id: 0DR10fyEwXFUfgqbTksOvK
fetching features for id: 3c06k9x6MHHdPQi3ix68yF
fetching features for id: 6EdGkH1mk7dKIgc3CKgo7F
fetching features for id: 54dLJWk0He1SHiNaBXUav1
fetching features for id: 6pJAvmUYLOyl2gHBUuShPt
fetching features for id: 4iVf2zAzVu499RwIov6uyx
fetching features for id: 18F1Hhz5HmbFoCVLM0D2fW
fetching features for id: 5Hife6QE6Hnq72Ims8CX96
fetching features for id: 7yTUoUbNaVkzdWTj5nr882
fetching features for id: 45RvzLqo8e2rqGPGQRFbFm
fetching features for id: 1EKCl4FD0ks1mqzYWMM6h2
fetching features for id: 5nRaqQ7rhWwZR7lk75qqLa
fetching features for id: 6N5S5zo5l63PEmuhyCbwjl
fetching features for id: 7d5UJFsmgvLRPxbMPza0Mt
fetching features for id: 2T2edhKwGuVF69i45qXkto
fetching features for id: 7zd7QV2lCDGGmwOxMiULVY
fetching features for id: 21ElXnithj4MX8E4rg5lh8
fetching features for id: 4dihWVHHo4eDBrRTEa4kAF
fetching features for id: 2kGSHT10a4pzedty1OFpCW
fetching features for id: 22bA5fQMVqBAQMDjP7Z3nO
fetching features for id: 3osIiNeeYjPJrIbl24qw4G
fetching features for id: 53qO57yx0LA4Cotli9WlLl
fetching features for id: 2gGPw8qbUilOhiSQRSvZG9
fetching features for id: 2DTTay2tPKDKUb1UYDgTnC
fetching features for id: 6o2FF4ArSHzLaLNxWxUndK
fetching features for id: 4BbLz6pVP5oXl2plR7HLMZ
fetching features for id: 2ptYBRF9lnToxEYQ2ngx4k
fetching features for id: 1klqBqLXdUN6ilIhtOAYtm
fetching features for id: 49tqyhwvGuhjFklMq6BMBI
fetching features for id: 2pIhOwhpKagalKdP89hMEo
fetching features for id: 71PHf4hLbw4LnQ02N8Gx7h
fetching features for id: 3jMfXcpWVX9mJmnzO0hEVh
fetching features for id: 4aHueicFyTxORTrQFEV9mn
fetching features for id: 6xzt3DjBQHIIrPIFGnZDEx
fetching features for id: 1cHIHTExDFGCg66uRwPeF4
fetching features for id: 3x7suPNmMkXGLBLMi0cFEg
fetching features for id: 7KYm266nz6EmqxfQpLUYdE
fetching features for id: 3qVrXbnbOeRqWOATjFb6Oa
fetching features for id: 0liRKxQQmW2b9wDVwtBFAR
fetching features for id: 6MWZvL0YOKvr3mB2iR4eT9
fetching features for id: 7crgweiPg38lEwViU8EPPI
fetching features for id: 4YehhvNVDkI2DNwrsR5JJc
fetching features for id: 4oeOG0zp2WYyL2ZQKD1gbR
fetching features for id: 42khap6heGqCfJCJje287E
fetching features for id: 5Uci1t39DsLmb8TjzdPdHI
fetching features for id: 7CnIx5LpNWIUuz4H4u6xgS
fetching features for id: 4AODrv2ts40UxwukxEPY71
fetching features for id: 4Tt2lvnrYwql1HvXYgXZmj
fetching features for id: 6bX5K0pwOdQAGUA8YT9TLq
fetching features for id: 11MPqDv9WZhq4Ma6cT2SJV
fetching features for id: 2OQhFe5jjTnMWaaDRZxz31
fetching features for id: 6dNVdzsCLEcKhqLxoB3Qc3
fetching features for id: 6o8FFvTMZFCCf9fB4bhXWn
fetching features for id: 4iK3b3u7XdnC677gp9AaG9
fetching features for id: 1ETpePXk6Epk0vT0sx51cs
fetching features for id: 2d1Qvf7lYynPE0UpZhz2ke
fetching features for id: 3QlrXlXuaBLMYGmlroFRTV
fetching features for id: 4KQL1NUeDs5P2TrlyPDmqh
fetching features for id: 0YpzbiqE1OG5kzVgP3Nkln
fetching features for id: 34l5f31YtT0WFj1mY4QswG
fetching features for id: 0FESeUvkf85Xj65Yv08Ovf
fetching features for id: 4CglcaCdtisnk0sq4LBFDD
fetching features for id: 19Zq3eSNhf951AwGn6XnNQ
fetching features for id: 5pzoNtKCAXQQnrdupb5Tt3
fetching features for id: 2j1GzI1DwpEado5Rjw0wxk
fetching features for id: 0o4pKAWsrM8YKfkCC5Xjxy
fetching features for id: 0oJo99vN0V5kFqxpLhiPXa
fetching features for id: 4bkVDSiPvKuUflJTRKkOoO
fetching features for id: 4qxmUFf9zE803gfmaHBmuQ
fetching features for id: 4zgVGCEzik0fdeYK5cQXUY
fetching features for id: 7FaYqYa8EBuDx84rFDO9vW
fetching features for id: 2XdDxK8mLhaOdXPBpMxOkV
fetching features for id: 7pBQDWVN58i618PhJpyhb0
fetching features for id: 4E4bW7wqx3nADox2kEVlvq
fetching features for id: 5MUM1P0T47W393eK02eCI5
fetching features for id: 1ibxkpxrMnq9CgBLSvFs0q
fetching features for id: 4ISir5Vt1QWXPU5Yy6an2W
fetching features for id: 2PVIDKZOTFRi8KOkuXB0Br
fetching features for id: 5A8dh7K5qdUKd0Tm2EIVsI
fetching features for id: 42wIlJxGshiaqqJlWtYBnn
fetching features for id: 0Naqxxo2gB6JniDzZGbOHv
fetching features for id: 3BPN4dpI5tyFYfRKNXL8Df
fetching features for id: 1V8AAdohEteFlvjsNwHsP9
fetching features for id: 5DkgAX62bM7gwYpOGtOGfK
fetching features for id: 32DkyRGUJoW6tlpUrRsOcr
fetching features for id: 3JAJE71YkaRAiTDVnOW2AA
fetching features for id: 4B3ULO9EbS1epBNIF0gwNy
fetching features for id: 3unSsfih6sEp103NgfYPke
fetching features for id: 7L7DgjhqnYYuIfgyhKTcxs
fetching features for id: 5rdLwV3jCeV8fTQRCFJeip
fetching features for id: 087wJuMxOm1YM7hhT2h8cy
fetching features for id: 6LVQN95MVvXSvDL5cgcHsJ
fetching features for id: 2QG1xxuz6QDTiC7QmQgWMt
fetching features for id: 6gyszE5aRg1bBcbh5Dq9gP
fetching features for id: 06DIV8OvK1jeGmV6NhyY0f
fetching features for id: 6g5NZg2VXJobSX3TmXq9nQ
fetching features for id: 5HtHlrt5pSGt3diFKBaJkg
fetching features for id: 7khsvqWYjms5UASgLb8bft
fetching features for id: 4yrTrpb8liAIFuA6Qlvqum
fetching features for id: 2JVPFUQ17Hap4JxMbUGfb1
fetching features for id: 0J2BwhULmcrOkRmQxcZQWN
fetching features for id: 2r7yNV824MtgwOlMAiBHiG
fetching features for id: 4snR6myh4KgWiZVdK5925N
fetching features for id: 2La69C5wSuMVXUG5k1YurV
fetching features for id: 4WohEROr1v5VlrkgmnCHFS
fetching features for id: 0bJrWYVfCpObiOpeN0TUOW
fetching features for id: 1z1WImpyDPcQIKdwseBjN0
fetching features for id: 5HveO08yOVzUGTs48bYzU9
fetching features for id: 5eV8oYrOrgh4ZSs2UXFqqs
fetching features for id: 6cyuQP6SxQkYiF3zLKQL8Y
fetching features for id: 3PWGOe61M4iHWPkhOo0yoT
fetching features for id: 56tIxOOatOzEc5TGXaKc8V
fetching features for id: 55h9R6rKBtBH4XY6ozo7ZM
fetching features for id: 45F49WgoqhwZCq1qtXi4KF
fetching features for id: 2k2JskUXczr5haGRV03CZR
fetching features for id: 3FcLmMZL5VGmAffR0is85y
fetching features for id: 068Hf6m0UfrAuTHHlCpW2B
fetching features for id: 5pZmKhBISjmTWzjsp1uMR0
fetching features for id: 2ZAziniVKKou2xrgbJOMfp
fetching features for id: 2GKZrOJdhUM0qbYdNCeO65
fetching features for id: 3jrTfnVy0xQ6mPJVxnuUUL
fetching features for id: 0Ah3HuQ8uhBjJwshu7tZiQ
fetching features for id: 73ugGXyBy400PGhs77uelv
fetching features for id: 45ywLbW1vB67t4koU30J1b
fetching features for id: 6JqUPWwbkWiQX1ItuOJYqG
fetching features for id: 7CHZdfFTezbYR1MOlCbWmV
fetching features for id: 4QRG7p25g5m4vRK8ec4yGe
fetching features for id: 7yGR8R1HgQJeW6s3KRuyGS
fetching features for id: 78eqrJFbfW5WRWVeM3KPPg
fetching features for id: 5qBqBdfTEIWJwAS0Jm2F5R
fetching features for id: 6u0vJG9SG59dFRYSwuhiKm
fetching features for id: 7cdRfsOoTmHBOBuhOC4Ezv
fetching features for id: 3a9JYMtoe0NeZwRXqLFQSr
fetching features for id: 5AxThlogq1ddfJdhMetYpq
fetching features for id: 6fZUCxyaLxjO6B2QJyTHQA
fetching features for id: 2VYvVNJjbGuyiRX2tBPVkm
fetching features for id: 0nGFjSLODdvEj7npKMU8GG
fetching features for id: 0NHtbgj4l2RDAYIFLJyVnc
fetching features for id: 5jlpKLVutiVG9f9CMYkJRF
fetching features for id: 3Vd7lnnD4xihfGIgqNDo9B
fetching features for id: 4mkkncTtp9mVQneL3Q3x1W
fetching features for id: 4ioBCs2b4q0p1zuzumbdg9
fetching features for id: 4rTszyh3bsd6xy6UpXsFjl
fetching features for id: 5TWgV7rwXT9xUEne2Q3g7T
fetching features for id: 6ddXJQ5oKBSVzp2VY90ptO
fetching features for id: 62k7oaajknbA5be6frZMZf
fetching features for id: 3y86HtbwgvflMylhzBcF9m
fetching features for id: 0c9ckWyi7iwVCieZy7jixX
fetching features for id: 1DqIFLNEGRCGfJpkJzFX0G
fetching features for id: 45hZUh5U0OluXkyj9omlXI
fetching features for id: 46VXY6ICU226D25CODh6zr
fetching features for id: 3LPxVIQ5WXVZxBGhrA7opC
fetching features for id: 0wX6mflFA4a72SOcAkDYsY
fetching features for id: 2Bi0JWdxDMaKPanNofJSdL
fetching features for id: 2nqCncFzp4UKMLMxWCI1JL
fetching features for id: 5t6q0AC5pTGxCnLSg3qu4i
fetching features for id: 2VreadsZpksgUYCgEvW046
fetching features for id: 0puKKRrt0j7O8wDroYOedE
fetching features for id: 4gjUJOXFg70Uuq12EAC53R
fetching features for id: 4KYnkrjlSAWwhUzPV9ciT2
fetching features for id: 5SaZMyjsjVR1YH225iVFyy
fetching features for id: 1UGRl3kBWlc0aEe2jafsyZ
fetching features for id: 2zyR4szo2EukeZkCRw54ET
fetching features for id: 2eOrKHcAxMssTrxVjT8MF6
fetching features for id: 5HSr7hDQL4B2UpzcoNaABc
fetching features for id: 6YmBllj632vtXTj2kQGI7N
fetching features for id: 36kH5d8egSQ8Se62A63xA2
fetching features for id: 47OvlSd10q62DwR8QawqOq
fetching features for id: 1gcY3t8AODYm42HsxVPo8R
fetching features for id: 6rDzwydTebaXai342YicSg
fetching features for id: 1NXfLEz3TRpf9ctJrOEEjP
fetching features for id: 1iYrvHaZXNrLolzjR6ZCe5
fetching features for id: 6YNRrZKHLr1cqpo3cOy8CS
fetching features for id: 2lZTbu5C5I5P1C7jAXO48P
fetching features for id: 3RkJsA0JxopALjE3Nkfrm7
fetching features for id: 4BZo9zSqVl55H3zEuQKbkc
fetching features for id: 2RPCzaGjubftL5XB9nmAJ4
fetching features for id: 03wrQEmLVQsiDemqaZm11Q
fetching features for id: 6x5BiQwNlbtisITsEHa8Eu
fetching features for id: 1k6Gsz8nnZThZ59ZsEjw0r
fetching features for id: 7q0CKuWtZgTwOaNWDGtGVf
fetching features for id: 6j0OVI9OAeu2HJdkSNVxs7
fetching features for id: 5hv401ZISKkQ7tOIagk2L0
fetching features for id: 29Xdknl9fhRsV0oOYyQOKy
fetching features for id: 3tuup6TfHtbNdqlbF01HrX
fetching features for id: 188nnxepHwA3d1TeWixUOz
fetching features for id: 41nHzM661MpSZ2pVIXdZhB
fetching features for id: 5WQZJeEGbFddlcrnPRcjmh
fetching features for id: 0br9zl6t3H0BlQM5TzvOQ4
fetching features for id: 7utH4pVmFAmMpOmhq5YUYL
fetching features for id: 7u6YcwfKv88jRrhDOVhBa1
fetching features for id: 0LuxhJENJYd7yc315s1zms
fetching features for id: 75f3C2aMNaEis1uFS0oZZF
fetching features for id: 0aZONv71HjoKut86G2ghVd
fetching features for id: 5uPKoULQx82sU3NZOO9mDa
fetching features for id: 4heMx0OAwfILu13Lf0VbBM
fetching features for id: 2sWdpFc82A7NP4DPazBQ6M
fetching features for id: 5nS5kcvyJG7sqx6hVRtHWv
fetching features for id: 2OzNYmuerhhV0FVX97UJrb
fetching features for id: 5mz9pQZZXNpAw9CdQ7Bk8q
fetching features for id: 6VOcKiMjjto3H6kwxG90lm
fetching features for id: 4vqZep7dEFi5MOByjUV6sX
fetching features for id: 4Tk6pf48rjQGOaCKe5LKy4
fetching features for id: 10yauUCK4imhAhCCzlickn
fetching features for id: 2Qy2yfjZa4FDZdforumynV
fetching features for id: 4vXM6nJT5IgfQa9xKI9Da7
fetching features for id: 6Q5UkaoE4QpsSm4kykFhKc
fetching features for id: 0D2HP0imuFLuROJEMp7M0P
fetching features for id: 6tluFvNsaxJ0ExyAiNOvi2
fetching features for id: 1xnYucUwKO612Ht2r7JqBF
fetching features for id: 3ByqU21P7nf0vxW5s7fOMr
fetching features for id: 4JLnUYJHDNLYFRqzHzrawV
fetching features for id: 6yvH9SyHQnI5cMVj0cavDt
fetching features for id: 1udKn1oNKYQSQ9OmiIWCMu
fetching features for id: 1IVWnhqf8tcGu4EKGkp3AP
fetching features for id: 4on16ARfYTBVeNBk8qQA43
fetching features for id: 5DoMxGnY3yAOGaf0xreMlE
fetching features for id: 5h4WnIAIiyhiGqZ4wASNAU
fetching features for id: 5xiWenMGQxYG9m8qHoVZ37
fetching features for id: 1DM97JJjBJShdMuWI0B5L9
fetching features for id: 4XhZ3Bi2fuIRrRGQQ4PcCx
fetching features for id: 3Opx1MTq68q1qsidkMNGMT
fetching features for id: 2jRAvJqykySWxGmdD5BCWS
fetching features for id: 6Jx6jy5B6ATD4wkWAYGR0f
fetching features for id: 3ztH1smFnnMohQUocY9Jzt
fetching features for id: 5agp4ORTcHoEqTWNKgElTJ
fetching features for id: 36ckFm0oicmvX8bWEErIHd
fetching features for id: 4g2B1iGsws8hpI5c2Rx7Ko
fetching features for id: 1dC1vc2C0lPBiL7oCAAr2G
fetching features for id: 5TzrUONJL0qkk8WDJqEGfj
fetching features for id: 5fw2E2H6yZJWN5H11dDznl
fetching features for id: 7JCd3x25rM1cc7dxfYoKe5
fetching features for id: 1UJRy5cOZtsuTPKq5pr7XU
fetching features for id: 17odnqwMkLKh7ayb8ZEmBD
fetching features for id: 5cQJMxRQuYI9tVMKzmoBhu
fetching features for id: 7qY7QChY4O2D3QhwgvNx8R
fetching features for id: 3IeUG8Sgyjb5ujKYk7lW1g
fetching features for id: 6ijOZdht2wqGT34yuj8uH1
fetching features for id: 4pxUZiQifmJAA0l0KSUf1d
fetching features for id: 0MU9aeHB4uKciIIBCVhIoU
fetching features for id: 5Vy8PKv7gS5WDR1EUuk0Bf
fetching features for id: 23CMTOcuR6nPHxG5ol0mB5
fetching features for id: 2A557Qk0ftZqDSuAvmAxMP
fetching features for id: 5q8U2NsIc57QWjY7MZi371
fetching features for id: 1YyoprYT9Oz3QiOqhYHO8i
fetching features for id: 3wn2aA8h5yl39BxhynNP70
fetching features for id: 4g6BbL6wLsTD1iQZXGc5fX
fetching features for id: 7wuO3DvGbHPGYbcaY0qCqB
fetching features for id: 1Ex4c1VTqxomrggLvR5Y31
fetching features for id: 4CKqqKUKsjNTTNF3veSiWi
fetching features for id: 0820J4H0wwqH506WtDuhKO
fetching features for id: 3Xe5EdeHYuL8FGiiw1Kwku
fetching features for id: 2Zmi8n6vDjO4T0miMFDwI1
fetching features for id: 4gOI04TYKDBc2LDJ0u9K9h
fetching features for id: 4tTUOHoNICuo2JZOj2hQOj
fetching features for id: 3sBwq1Bhsd2thzSxC2PQwh
fetching features for id: 5PaWiOpagb2p6X8PbFBK1G
fetching features for id: 7rOAjhjlNx0LYsYJthOP89
fetching features for id: 5GInJChhrHyXmwb4tQFJJG
fetching features for id: 473hgaSdgEKvL20YdhqVaK
fetching features for id: 3u650FGOhGmw3EjkLCzSJd
fetching features for id: 40N0Wizb2BANJZTRZ4oivr
fetching features for id: 5DDmne2Ia9bfDdXklCOVjl
fetching features for id: 0QCQBP11w2QNel5LhkYskU
fetching features for id: 475yfAikvSAt5sQrDFkNGH
fetching features for id: 1HU2MNYU5CNIsjQgkRXz1k
fetching features for id: 5FGJkdpDfTsNsEYcbPnPtB
fetching features for id: 2RARojKOA0SR8VxWuFyQr6
fetching features for id: 1TeO5FsAnM9F6XOzJyImBG
fetching features for id: 66sX1HdE1EM1kBYgKU3kbD
fetching features for id: 4d8fqtVtYYE2wVkrU3mTMO
fetching features for id: 54eajp10qPgjUCiD2Ds9jk
fetching features for id: 2lp6dr6gTMQJgI2Ny0kwYw
fetching features for id: 2ucHU3u0UMQYyo40B9zaIW
fetching features for id: 5LQwvnlNn84XLcVxWdtgD5
fetching features for id: 5AIi7YlHwURZe2BNcyU9nh
fetching features for id: 114mpBe63wliCxik7TKcw6
fetching features for id: 6h2YEmoJIEuXzfc8b0wwOx
fetching features for id: 1pL1H11bERuV5QIoFd8Cp4
fetching features for id: 4HbzefakOqCIkwActU5b3N
fetching features for id: 3YhHxoJHKmKebNZUzCOOWj
fetching features for id: 2zCruKarW8FMGbK77z2yh6
fetching features for id: 2hOUhzloY09AsjJ61IyWbV
fetching features for id: 54onqcRS41n2Sp6ZwxNTQG
fetching features for id: 7sXEILFDaRhFK7UQZ9gEfx
fetching features for id: 6kOGvRl0ie97TShrk210ON
fetching features for id: 5xsXzVOEA83GlDZ14Q74yh
fetching features for id: 7hPIEh41LPasckJFXOznmW
fetching features for id: 71XEFgsJJFGn1MUyG4l39N
fetching features for id: 7tElOwaw2ONWiPCW8bDErC
fetching features for id: 0s7JK7h4rlpAYG0pEJC9Ow
fetching features for id: 3D6iiz9rsoy2PvTJ0Q8OBH
fetching features for id: 6qJSIEaUw2VDYRzzHcKjqm
fetching features for id: 53QdLR7B8sP1O4AUvHEjdF
fetching features for id: 3e0CoSLzOHHtjyYYmNUKaz
fetching features for id: 2ii9WItOrdWGVVhtcPJxEZ
fetching features for id: 6MGqxb12oR5TC6lIwQsANT
fetching features for id: 59RkCJ1LeyY83HvlAcVuYa
fetching features for id: 3rmpCyBbtHUW2oIMNiMYv1
fetching features for id: 4vPOtuL3HbN9zYYV5oT79i
fetching features for id: 3BjitDpDJErAxMGnqDpdMn
fetching features for id: 6YNDwlmzhWcTHt70Mq9IIE
fetching features for id: 2HCaIYjkvWSZzaSKUoOh3d
fetching features for id: 5BwkPXxLhEvIdeqzZHobwQ
fetching features for id: 1KgyVoQMJcYbX07QC3aIQC
fetching features for id: 2m4Pj6wQvhiBITHzjYZpsh
fetching features for id: 6bMF4pCGpD6JQxGcIaspw9
fetching features for id: 6TvAEPaJCqLnLP4tLzbjFc
fetching features for id: 4i6CSIdAlGHyz9PLn52Loo
fetching features for id: 4ErkRZoMu2sErWBHNyWxNR
fetching features for id: 101tKsNj1azODLsJbZZNiM
fetching features for id: 1E2FDj8wsyqAsEGMqypswj
fetching features for id: 7lPSEIzv0GFZj8KEnaFR75
fetching features for id: 3OfttbJAJNvkI4VwtZD2E7
fetching features for id: 0SRkuudTEWe2HOloI1Nssq
fetching features for id: 18TM70njJRFH4Fm4ZuwLd8
fetching features for id: 1i2U8DwAtHKFN75632DlZH
fetching features for id: 7BB9AfVb5bjEdpMN8HafdJ
fetching features for id: 0xUt2TLACg0gLlDGWsceGd
fetching features for id: 0yAS1Yhyqu3pZVIEwSxRxn
fetching features for id: 1o0TEStQmRssKSu41iQnn9
fetching features for id: 67SVvBhxFzSZiNoCGOfdTe
fetching features for id: 4MprqMObZb1NiIpZaAGTAz
fetching features for id: 1Vzr9AOdHw65UHoK1Vuxls
fetching features for id: 1IsaW2BvYIs0iN0VjeB3j1
fetching features for id: 67GjAr4MsMZgYuNW0pX1fk
fetching features for id: 3v2DaVVxtgsO9Q3MnVTMO0
fetching features for id: 6FfS6LTCO8qaDb4s9Q8l8T
fetching features for id: 0Lo5YM7ZvIcUnbvnqLMUre
fetching features for id: 4ryl610VB8Nb7aOKwuA4wQ
fetching features for id: 3haZcHm3HydDVxm3je3Zmg
fetching features for id: 5MrBeFD9PY3pCyB9lfB6Ai
fetching features for id: 5pabbbDXPaxz5phgY8Mnut
fetching features for id: 4pmiBE8CbMTELLsVQnUENs
fetching features for id: 43z94VXUDXzqLiRy7wvK3x
fetching features for id: 4nGIIeemCmkJ2I3hpC3x43
fetching features for id: 1jYt67ObwzHEKaa8DxrnV2
fetching features for id: 5QdkhR3MSMQRqbsW14KUXZ
fetching features for id: 4efIPPwKyU2bCYhzdgudX9
fetching features for id: 3Zs0dCcOoAkxwnYuKYCWZP
fetching features for id: 42wmL3XqWPyn9C5clU7VMc
fetching features for id: 3PmZmkgLfJtKdPjFF24uML
fetching features for id: 6XNny82mljrVzgv1zdrWzD
fetching features for id: 7nHLuhcD1kJyinxz2VabgC
fetching features for id: 1dPUQhlNGEaDm9Qi1vcL7I
fetching features for id: 7Jm0gQhWuBMzp0TI4ydm3D
fetching features for id: 4SfgngnC3koo5c4YMMYkcc
fetching features for id: 5Byd7d0WlL6qgmQb2SEoc4
fetching features for id: 6rGuOsQMY1UsvyPcpvbE4i
fetching features for id: 2V1jzWfUm1HdsqSt08fFtt
fetching features for id: 2fGe1krTxovW7xsgiaHkrN
fetching features for id: 0fXd3B3PbDr1fQvgZ2ZLfV
fetching features for id: 6M1WuCPI2AOBiR2NkJ6GML
fetching features for id: 2w1vbqV6Z3caGfLZdVLJyW
fetching features for id: 0Kd48ETagbgGKoXqK2Ne3H
fetching features for id: 7dHlQJqO7UWa9E2Vm9Z5S0
fetching features for id: 1Ug9kQitfdcgyrWDjvXV6z
fetching features for id: 0Mx6Q5XnT84BLk3e4ITJGC
fetching features for id: 3YY9XB322S8n5CIobQsR6h
fetching features for id: 3T4zow8ial809ciFETZ6wM
fetching features for id: 5zTsicOFimBo9Bj7g9XSkv
fetching features for id: 56OdLFVSIGJdtVpHrAKkd4
fetching features for id: 64NwEdfzwUGorCzC9tYZFA
fetching features for id: 6sm028EoGuUbVnzC6pDaNx
fetching features for id: 5bveD2SoLJh6JPDyY9mfDJ
fetching features for id: 4lTMgNop2AUchpTEH9ZAqF
fetching features for id: 3rouQsYoHY3HfZX1glHxPv
fetching features for id: 0jTX2E4WX4BtSnoVRUGZJX
fetching features for id: 2F4ix8buVKDFhVH4yjOg2s
fetching features for id: 5a8eQZcut98ho3QDqhDUKi
fetching features for id: 5UrPacEd3L8dsjwSNPDL2d
fetching features for id: 72MvrPaYBDVZfLapuVqsts
fetching features for id: 2cLZdMFit9ecOGvwYfTeoO
fetching features for id: 6Y609znJunsJnpW0RYf7iR
fetching features for id: 2bcV8ePGpwBw7HZCGTvvwC
fetching features for id: 3MeP8rMjztb9MpAlBaO6cU
fetching features for id: 7g599RRZDUJ4J6kCAh7JbS
fetching features for id: 0nmSuqq9khDg6piXYgyzOd
fetching features for id: 5gkeDbOU56XmoNMcecmbYr
fetching features for id: 1tZuun2k2fmLx5rF5ja0aX
fetching features for id: 4rbYdWpcjXi02Tm3hvmYlh
fetching features for id: 6KfVhijRswRpWIuxq7Eqlr
fetching features for id: 4mmoOJwXcf0nEH0Z2wivAV
fetching features for id: 2JZbhUQLyXdBtsJxGU339u
fetching features for id: 2boQ0A6CkYdKhBn250tc4r
fetching features for id: 3J46tDUbPxjGlfudGIaRrX
fetching features for id: 2x8J4apESFBOPmGqMpLh0a
fetching features for id: 5BSIMqJcGvM9D4wPw9CY4Y
fetching features for id: 5dKpUFw9JC4F322nEYV4c4
fetching features for id: 4BkHHw26fIlHEdcUqeCkgv
fetching features for id: 0VpHIkmXGSIJ8phPcRfQBU
fetching features for id: 2bTAf7hPTbjiKzvJLqGGbA
fetching features for id: 71K4AFZLc2hGYPiNncTJ1C
fetching features for id: 2WX4wOulDgaoWJstc4r34h
fetching features for id: 70EUOJoFKWuzPCyHl6l6nL
fetching features for id: 3qC08srhXBJ6DEQ7CwPMB0
fetching features for id: 52mslz1GDjNg4vmZHZGmHw
fetching features for id: 4MRhEMGSsBTaBX8IBNCe1b
fetching features for id: 1Ti8nW5kOkv33Swl3DZveQ
fetching features for id: 0UCiPhWITzF3yU2rVChCVg
fetching features for id: 7kxMwCdLI8SOXjLA9tWVyF
fetching features for id: 3tvqPPpXyIgKrm4PR9HCf0
fetching features for id: 4VFGjvg01giZD45wwmVUXF
fetching features for id: 0xawXToxW2wFhSVrkP40iO
fetching features for id: 7eGLhn6AJNiXJUS7VawiUK
fetching features for id: 2ZVb4m91QCXD2GFbxC2OwV
fetching features for id: 6Coo1Eje58ffuqt2JBDMPk
fetching features for id: 0WhZRne1XMn2ruGRlGJVI2
fetching features for id: 2aBa2XO23shN0lQjpL0G1K
fetching features for id: 16HrfeIFAE98VAXV4WdjPg
fetching features for id: 2cqBR0FuG3DWjwnurXUFdK
fetching features for id: 2nCMkTfSIjCSN7OjdQK1AC
fetching features for id: 5BkcuH5kyZC5ZJWfWHmahH
fetching features for id: 5vKop7Xx5LtKcEqERNPoYV
fetching features for id: 5gluIvIDxcH1BkHn58QPZO
fetching features for id: 79T991XVouO4qW6yUIH75z
fetching features for id: 7arkA3xW7Vg3ipw6VQinmU
fetching features for id: 1znMBunwCIVomAUacZdkxX
fetching features for id: 0wFZCKRA6ss5kFigKArHuY
fetching features for id: 6F6F3EX3FEntkWZKNjvEoL
fetching features for id: 37ycGnhGIt9nxYntGDXfg6
fetching features for id: 4cj5wrbKodTgwR8AMqdYUv
fetching features for id: 65daZIvb8sLV1QtYkziYfp
fetching features for id: 2U8i316GHJMluI42xqbThK
fetching features for id: 1D2VIwDVHzh5n7nkGk0vij
fetching features for id: 4DdICLH2xKTy6d4LFS5WjI
fetching features for id: 603aCBXRNkrSTiM4uRGkbj
fetching features for id: 2EN9rD4AlOBPww3ZtFox5y
fetching features for id: 4MFU8kCLOQD9nV03Gfvrkn
fetching features for id: 71UG7fxAh0mffJpyC07SiX
fetching features for id: 7hyuFoept7slNptV126UwW
fetching features for id: 560ROSxJmpRIIp89O1AVLB
fetching features for id: 4UPksaq5ggU0ZchtmHhtDd
fetching features for id: 6QwHXIZxcRn1EH4FM5fRAM
fetching features for id: 76PjPbGl8IaVqJWV153oJn
fetching features for id: 7vYA9ET5AUqJt5pBbhKmcB
fetching features for id: 4Su0s11An3mZPnMiPxD0fn
fetching features for id: 57GQHVnq4UIAgxU38lJwUC
fetching features for id: 0UglrUqb22msRsDHqDmVTY
fetching features for id: 76dD4B8fONKkIXxSfFTM9Z
fetching features for id: 6rd3jnfFTozqphQfktsdVo
fetching features for id: 5TrvERHRtiRshORTehfs0w
fetching features for id: 1ZI40a48FO8d3OTUfAdYwn
fetching features for id: 4QHapHuNmFvbvK9L3a8jmP
fetching features for id: 1V97AJ77KW3BnVYCwPwvfF
fetching features for id: 1PYpdnXFzFO4VM5K8G4PCu
fetching features for id: 7BNihfYAzfKrZ14N4NziFe
fetching features for id: 2ITpt5mcvMOI7kIJPqUvbb
fetching features for id: 12nrPFtvEUVFdbN2cM3oJS
fetching features for id: 1atDDBTMunTF3Vwn2jpTLV
fetching features for id: 5JBHz653h64fYqdntGj1rX
fetching features for id: 4imOI9rBD6RW7SgDjyWp9N
fetching features for id: 33XiyZ5JzpdfbxW8yv1Qnm
fetching features for id: 7EsyuIBXcGcEWGdkZMAIJi
fetching features for id: 63W5utSNaHPHm2hWD4jF7u
fetching features for id: 0ZQ6ckgerHYM5mOUxTl99Q
fetching features for id: 0O6rINb71GYZQoea5fxTqf
fetching features for id: 7GzisMgxLZgCi0Ed3yTw9y
fetching features for id: 0SmCDPTRx0gqqDGjGHeM2K
fetching features for id: 58QCFHBuRF8Fvkutf0HYAy
fetching features for id: 5O9sIXT7EBkvz6f3yZCuUl
fetching features for id: 6ydpo7H1hyzjnDhDKm8VqO
fetching features for id: 1RE85ZBVGofX9JfnhNDaER
fetching features for id: 03cYkjvuyZ4jACwo9MYEZW
fetching features for id: 3aXcJDHfMtDRZ9KLuzScrB
fetching features for id: 2Js48tzQZThEpumKgcgxWl
fetching features for id: 3SOxNBtEd95JHvdSIGhXnj
fetching features for id: 6HXiCCertmHIfuVHDdZ8QG
fetching features for id: 5naGmbqRY9AlitnRgbw0uX
fetching features for id: 02oIcYlpfTq4p3ssQnyuqG
fetching features for id: 3pEwXiu1AVstyAluolJnW7
fetching features for id: 7hxha93ckBwTck8s83Pu9b
fetching features for id: 31ivuiF6QnKMJBscB3tRmM
fetching features for id: 4BOvsc4Orv9LWNfSEFbzXH
fetching features for id: 0KOE1hat4SIer491XKk4Pa
fetching features for id: 4Gx5o90mo9HYZxE9GoxcP8
fetching features for id: 61xuOY4bOj3Z75SrUQ2Aqa
fetching features for id: 2jglHyoBmjsyQW2j0CAuhh
fetching features for id: 4kVdh89jfzuU3TB1L7Lbt7
fetching features for id: 5XgCk8ikjfTx02rgxNULEy
fetching features for id: 2Jy7WpTqsa8yJtrdvfse0o
fetching features for id: 0lMfJ3HjAXUKL517ePufmc
fetching features for id: 4VU6RQBoryrzDh9YSFT7rz
fetching features for id: 1EJQrVf9I93wlkzJAquRWP
fetching features for id: 1cdkzx4QGDL8V3J8I2iu1V
fetching features for id: 6HDakUMdK2KcQZZ4TwyAbB
fetching features for id: 7LJDHtkeSLTpnecBcQRl93
fetching features for id: 1woFG35C47HlZ8v4hjgmMx
fetching features for id: 2O5tP6uuOZBhVwDeO4T6Hk
fetching features for id: 6wXVUSh5lxvulIR1sSWaEz
fetching features for id: 6IuK4tKidp5nBRnGbHTbzX
fetching features for id: 7bhoxrOErOoqKUXJ6CA66h
fetching features for id: 3hYBWJCHTKaYRjFZWR0cfe
fetching features for id: 7BiRyVxIimvgnlmZg4vSi9
fetching features for id: 4w0D62o5qwUPtrcTTAHe7d
fetching features for id: 6P6adjzPbo1Ukb54i9D4rt
fetching features for id: 545PsoburZUtR9z75I2wjj
fetching features for id: 6CZA78dY3Xx1B31y0BP7mg
fetching features for id: 2iw33rWGH2cjSFTkFJRoUf
fetching features for id: 2BOdbqFnRer3wiYNMaV33W
fetching features for id: 7skXVFIQNnTNwBhhCYMKbn
fetching features for id: 0af0cAINmDtjCXGJfb2GAf
fetching features for id: 5wNgjOGEWjuCd2mj4ynRM9
fetching features for id: 4AmX2GyjCRyk4WMMmiqUjd
fetching features for id: 5A8Ohrypi99jnyYICKHvPM
fetching features for id: 71UPOlaPow5CbwGvLmPmDQ
fetching features for id: 4HIVG4xTUCk6ruZuXKz6k3
fetching features for id: 1cD8uv6vzf0zOyK4YtqMsa
fetching features for id: 21f8rcouzIT9PR9j78f5MR
fetching features for id: 2bLLKpVglQgnxfRAdmwtQz
fetching features for id: 2SqgxBocr9hhRXkzkD1HjC
fetching features for id: 21vO5hxkYIvnbi4S7tEuU9
fetching features for id: 0RyMaI0EvCiiXBx96vwcgH
fetching features for id: 4D1IQ6H104u2XEpqQPEg1V
fetching features for id: 0S9Kq7EHqTfJdy4nZrqaLA
fetching features for id: 0bYcOiKMwouNJkD00d5mRd
fetching features for id: 2Ki680uvSg17E1p7VLZvVL
fetching features for id: 5eB92pGxhuDFZGtfD6tWvj
fetching features for id: 2373nwcpoD7IXHmqQBmLDl
fetching features for id: 4xiXXumq1Hn3fHuYqTT6jl
fetching features for id: 1glf4huG7cQpPYQ3ho7vuY
fetching features for id: 2Q9gMRzQF3PTuJ3dCVVwmx
fetching features for id: 1zXpbZZZ9Lb81ZrTqOzW1m
fetching features for id: 4vSa14GE2qFNtyRp9Twc9k
fetching features for id: 5joPaFWU96257ya2GauZfU
fetching features for id: 50bub2xSIii5mka5owOPHH
fetching features for id: 1tuivE7XrJtvLu7qkMl6Zx
fetching features for id: 1jDsl8ikPxRAQcZCkRFjsd
fetching features for id: 2HwrXACzxpTTFRbYVtOoyg
fetching features for id: 1sb7AGizHorLbZA2mpNP1E
fetching features for id: 1CfQZztUvK8D4aEJ9OA9IX
fetching features for id: 3qVGeXOGIBaLdiDFoqZ7nd
fetching features for id: 63ad8aVVsWieYeG7oqW4Kb
fetching features for id: 3djP82UEfvX6YkM2HGg98s
fetching features for id: 5PRBOsNq3rc59vgLnX217G
fetching features for id: 1VV3bGtDOniEKmgyqfEAfS
fetching features for id: 72BDusX5digwIbCTDd8QXH
fetching features for id: 4T7KB2dRMyQ6G7Ws1LEirm
fetching features for id: 2EYXCRp63iQdmeKHXN8Rrg
fetching features for id: 6J0tg6nslInIHwk9NR2DL8
fetching features for id: 5jEsZaQqkEwJW0yovgo1h5
fetching features for id: 6JxLKiLtOlPV0XHbYJsvdl
fetching features for id: 30nTKl0xDGt9EQVrAEGJKY
fetching features for id: 2Bbz2NbJ0m4jwVizwLYHxC
fetching features for id: 3ogLyZFlYqXC0ozBjBm6Nw
fetching features for id: 7hYRSuBF8ZfV5gQlTWVqoX
fetching features for id: 3VVRXz0SxxyCsC59xemWT7
fetching features for id: 0hRECUb42yXfkSOS9Un1qf
fetching features for id: 3XIehPd5pZmHDpjJlAyxrG
fetching features for id: 5aIvM7Ov8VcKpu6lEr6Zd1
fetching features for id: 4MFU8kCLOQD9nV03Gfvrkn
fetching features for id: 2ub5Un3ktZ4L4ii1gx74th
fetching features for id: 2kOp0KA5cbubEgubr33xq3
fetching features for id: 7pDmkFLinntkJnSl1TyqDU
fetching features for id: 0KF0vGxM2aUvJB7N2I5who
fetching features for id: 4eLVf9XRDtMOe3X5KmQlpI
fetching features for id: 6LHzCuLzfZQr0V8eZD9TPC
fetching features for id: 0t22pTUA8MULIomcESJG1S
fetching features for id: 6TO29gYhF5EHRvYPgit3EI
fetching features for id: 0MUli897soECfYgzY3le4i
fetching features for id: 72vR8JFvBT1PLYabJGyC4a
fetching features for id: 1jcUhX1ocqQdHsQm0mRKrX
fetching features for id: 00kG3Kz2rxem4XswOmtF3s
fetching features for id: 0L36sfZ3rOf3BwReJQZBRr
fetching features for id: 1asba4YipSYUk4WY6A2O1g
fetching features for id: 5C94uTWvgOeUYbdPTUTGpk
fetching features for id: 1Gw1tk0DDbGkDgmSmyi4BF
fetching features for id: 7h2u2fQVAEL3EOuTMjvBLs
fetching features for id: 6bLyL52pIKd3TGgNDJeQPn
fetching features for id: 0bYjPXZEUYYhMwmd5KpVK1
fetching features for id: 0zPpqu1oiqRhtc186hvUbv
fetching features for id: 0cdd3tfG1tQMVyafEAY4N4
fetching features for id: 7FYlck8UYCdZkkpvKXfWgu
fetching features for id: 3LuHexaqSVWyVabEwR3UaO
fetching features for id: 2AObG8D3AVTKVmdLWWSsSd
fetching features for id: 5Lst1j9rvgyzaCciTQn0v9
fetching features for id: 3fEIQIVqk3r5ISmiU1Lcbb
fetching features for id: 58p1pH8V4ZLfJ1RusdeUIk
fetching features for id: 53HP1YlOvzYkImudH6FN6K
fetching features for id: 0R9MMH16IjnOIZXiQ9uvdj
fetching features for id: 2v4HD5xA217fJLGJSEgj1g
fetching features for id: 7cUhjPDUH3S6EgkgF2euQd
fetching features for id: 4j8DrdCiiVRWw1iRfOZcQj
fetching features for id: 2wLDsBiUgoOXJoxtCDm39a
fetching features for id: 7H8VmgeUtAzVG9jyEyyMR1
fetching features for id: 6dEKQOQChSmEKCrcfXdsvR
fetching features for id: 2DhT1bCyuYmBkV1kGDAgrH
fetching features for id: 4ZUoxQRuDxESt8m6bSNDlp
fetching features for id: 4L5XnD2PMJrc9A4Mcp1asi
fetching features for id: 2znDFdRm8TV6lbzmxnTPJc
fetching features for id: 5hFanCfwPB0araPLGpEo79
fetching features for id: 2Co8otif6KH0pIMwzNrIdf
fetching features for id: 6CJe6vTWtIJB4sa8csqtfv
fetching features for id: 45tNymiGXb2afxk7opDlmA
fetching features for id: 2yS6Vq5ErsiCLKsgTfE7Nv
fetching features for id: 0J3nckOfR9lwUdGdsPnXUX
fetching features for id: 23eEGOLfI2pFof1rmQNpg5
fetching features for id: 4QVLwVgNxqTp73Gfg6pS56
fetching features for id: 39VqKubwf5yfOgAs00OOBi
fetching features for id: 3U4fKeMEBEPTdDOjAymS40
fetching features for id: 0y4WLsDFcdhGEiyGrTXwV0
fetching features for id: 2LlLifYZDCgja2m1uuFZRi
fetching features for id: 4rmB4N9wNLiq18OT40mZvd
fetching features for id: 2EuyCCWC1aJlQKjcIANOUN
fetching features for id: 0mD2pnYVF8DB3oqtY5KM8h
fetching features for id: 0gtlgRyGc3dSjMkWQR3DST
fetching features for id: 03QGOXxk0HWLuSd4sHeZrL
fetching features for id: 7c4lxAjzuxcRvUdZrKf2Kk
fetching features for id: 2fdSWm3CHTz5ljdESiV2hT
fetching features for id: 0aZrrEDEqwCxxlRdXLXKJK
fetching features for id: 6blxz1Nvtv0u0EznvX0KEa
fetching features for id: 7iPiOABYfkx6iCTAhtVBbm
fetching features for id: 0sOJ0OqYFdARjr4lGyYaq5
fetching features for id: 2LzcpixYEhVKvoQq7wNbCs
fetching features for id: 2UbUp7nSoua1BTw4zBAqGJ
fetching features for id: 3PfdTUIPKYym6QAhb9JuZ0
fetching features for id: 13HjLIRtAmIBafMshw0TRE
fetching features for id: 6FH8qFGw3k9x9d1S3N5sTN
fetching features for id: 4IISGbUpoUAXqDdZews1ve
fetching features for id: 2UoVOxO8lFcHNgLHTQ0ATn
fetching features for id: 65b5gubkkwqH1J5XuRwIvO
fetching features for id: 4EuFMPZc9DLVC2Rwg8hXPK
fetching features for id: 7ijsGsXIs2N44m7EncIyVo
fetching features for id: 3ifchznIPMVq7aaXKg9IjS
fetching features for id: 7lDVjmU3Iytoxz7mSdanyd
fetching features for id: 2jqkVBMchQP8jiuEJQqQt4
fetching features for id: 2ulPFs59dOCrd6JLkvNIJR
fetching features for id: 0nqnKtIzXjccQBh9CqF00T
fetching features for id: 7jon8ItjfPNUkn9cWtkhnT
fetching features for id: 3yAVLukdNZvGDWLtr7oFGl
fetching features for id: 7mp74IKoo9oaES5JwBWBWO
fetching features for id: 1rY2Bku0YzUXkOpI0UKEeO
fetching features for id: 0qVUFuuNIm5p643GyB9tP8
fetching features for id: 5lZ3sezVWHTpmdVbHyMDBT
fetching features for id: 1dYDIWo3LKxziC1a3stxwv
fetching features for id: 7tpKhVU4RzCK8R6Ry0exhb
fetching features for id: 4WbDOELVQqZ6I65U4HaZnE
fetching features for id: 75rN9Bn96K1hBCkjyfRqCh
fetching features for id: 5GvVxxwieOVIlZaCU9bG0R
fetching features for id: 2aezcsIssx7sGfFZojBIxp
fetching features for id: 4FcfemBds5WQvH9jglw85S
fetching features for id: 4ELSyxd6DQ53eRxYLh1FPq
fetching features for id: 0YkUuxTaLCYORCcvufbzuh
fetching features for id: 66CXpOXmoL4OdAp9QOHfiO
fetching features for id: 77dBhsboEbsoO8heaoHlJ8
fetching features for id: 55UQqjMp0ehTEPw4LUP77i
fetching features for id: 0rkG4IEfLd6VAhIgUVDr45
fetching features for id: 1HpFPHXxeS7DNgOJCIxejA
fetching features for id: 7I8PL2ZXS5CjmU6QPpX9Oc
fetching features for id: 7DcXwLr5vbDroOgVKb3PzA
fetching features for id: 4cyLb9jNC2oS0Eb0GLmMta
fetching features for id: 5A8GIIn5vD5Jrm1etHXS3d
fetching features for id: 0taWJ29dLX8811DhVal4UH
fetching features for id: 1kjJZ1cuWV8cBICGmQJcy4
fetching features for id: 6n61ehXOOma2LdikJ9A5wj
fetching features for id: 6Be0IyyMfaE2XdOyeLHHJc
fetching features for id: 1AZ5G23Kcn9h5Awws5Ekf3
fetching features for id: 0ERzRJRLPikYbM1PkLIr4w
fetching features for id: 4GPbOUUbitW3e6aEVccrhf
fetching features for id: 146vkvsbGH7xQYRKjAYEhG
fetching features for id: 1ur34rpRdcVfPcecoNZz0w
fetching features for id: 1zc9G4b4zbRptS59Lg8VZu
fetching features for id: 4N0VVY1BeWVpllqcwX7mNH
fetching features for id: 1RdnFVwreBGgMyNn4DbFh8
fetching features for id: 15w9bLXVzEqArvXiNNvAjl
fetching features for id: 5CuDTzGY0Aik9gsFfdNSAY
fetching features for id: 7Dtab9QxjMZYupetS5RBhF
fetching features for id: 7LAMmINm2WrXyvFAD1iVnD
fetching features for id: 27UeXVl5WQi3r23TzId3NB
fetching features for id: 3TpkE4WMC7era1Nq5waLCa
fetching features for id: 2c8mUfMLoIPHgKT3abqqbP
fetching features for id: 15ztgB8XHpc0X5BcYKhNoR
fetching features for id: 0BZut5Sp3KeiC8FWL99two
fetching features for id: 2jiRBa5wVKmJyQlR8nGfwK
fetching features for id: 3ySpsI2dzdyGFHBg9ykLbi
fetching features for id: 3C4Nmn6XSJPSvOFqMCLC4u
fetching features for id: 736V9Rm2sj3fb3Ev1vIifz
fetching features for id: 3F1cnbWEmpv5wsscAXaWsR
fetching features for id: 64hNKDyAZw9FEUFqH6Im9o
fetching features for id: 3FkJ5VtmZdIjGyTjK1pHlX
fetching features for id: 6aCw4ADUIyVQsa08UN6adY
fetching features for id: 6FXLVvGylQoxxpcA8mWGRD
fetching features for id: 5EGrkw1itB7rzEZVVXprJ2
fetching features for id: 6tsjCzciIajxisQtbUkOUa
fetching features for id: 6kAROIu6CLVjulTqSPYmAP
fetching features for id: 1tG3zCppK6MN013nd36B3E
fetching features for id: 6ElbEXlLbjWiRgzS1HpM2J
fetching features for id: 4lvd9RUYyT5BGG1URmeUJz
fetching features for id: 5n4jYHakT92Ia0005tNYPn
fetching features for id: 0Q5rG1IxfF3qXSxifWvFOE
fetching features for id: 1O44CWBo8UGvxj4QN4CJtb
fetching features for id: 1WetFltHqdcEaVUJ2EvRc8
fetching features for id: 5c8oQiQd2bj1rr5qRqEuPZ
fetching features for id: 1CIhm1lOEztiL32I4EOisf
fetching features for id: 6m6s3MapXf4go7fmDJv3IP
fetching features for id: 6pMAYD5AXLfqq6r8ciuvT8
[... output truncated: the same "fetching features for id: <id>" line was printed once for each remaining id in the list ...]
fetching features for id: 3e8Rqh9GPB4g6qPNPIYN6n
fetching features for id: 4S2YE89fZPWOsHoEquZTLZ
fetching features for id: 3rjOwWd1wgJNvoqskwcPhT
fetching features for id: 1IIpglMfoS0Rge2ZpBTmRm
fetching features for id: 3L3nhRywze6Ri0FmU6Fp7n
fetching features for id: 7MTHzGaWm4Z4h1hrZl9vgx
fetching features for id: 14WPjMgMJfsxSaz3dm9KwQ
fetching features for id: 70UvKFI48DAQ2suS58bu5l
fetching features for id: 789lq0oHhOa4pHh2y9rjkN
fetching features for id: 499iOZxvZrkYGh0sHffvbZ
fetching features for id: 1CrXQTpGrMhym7ASXcmTzd
fetching features for id: 43cG3qOGaOCtRN58ez1Pgc
fetching features for id: 7kOC1rXz5Kx6TycRg8yzMj
fetching features for id: 00Bu7AiNb06604KMuYTQAi
fetching features for id: 3QAkevtri76hEcI6bi1iQj
fetching features for id: 4zj5M7tRcRj7vP1NDZgsth
fetching features for id: 6tTGoSv21CvDXg3UoI8lQj
fetching features for id: 77KfccxlZ8Mq6Jk1kqCaal
fetching features for id: 5ccGCE1AceABOunwSCVAZb
fetching features for id: 0KkmbMeW0di1BgRRzN9ca6
fetching features for id: 22t5Efyx3X7ZjrjfjC8xMb
fetching features for id: 0qBFeHjlC4yBTQVPCI2gg3
fetching features for id: 0WFi7WfDuy9aT14KC5Aucu
fetching features for id: 7M6YTNCCXcD4kK34pDBg7u
fetching features for id: 3JLas4uh1MR6yDdfZVAmYs
fetching features for id: 3RN7zbk7QEKR0k3IYGObfp
fetching features for id: 6KUvIEKf0hDsbBC7rmWbvB
fetching features for id: 7aUntLt00Tgpawu1zrQvKd
fetching features for id: 06is43MVB1a03zyhtRBLpX
fetching features for id: 5hleF20R6SrI99rz269cSS
fetching features for id: 7trQt6FiO9hWIf2ltWNmuG
fetching features for id: 5NeAsRDKMFwNoq6Mch9QFu
fetching features for id: 0coxJ1H2iiOeCaVRCzHROs
fetching features for id: 71TJl06EAYPxNJMEXBeGJ3
fetching features for id: 5Zt2tF219dfVI32VAORqXH
fetching features for id: 7MIzIo0Z9S8zmBiXdGwOeS
fetching features for id: 1BILDfIiSgFh6MvLHjq1Jo
fetching features for id: 5UPB1Bodo6FPlWbjkLm5WX
fetching features for id: 3SzizrskGZBj2oSWycYkV6
fetching features for id: 7BywmFdliPDRvotH59zWTQ
fetching features for id: 74oEwvr2my7wMYJYl5WiMo
fetching features for id: 67wVH9o1QtIZguaGNc3cmU
fetching features for id: 68n5UfHyShU45aHYamc2j5
fetching features for id: 5EILqUxXppdJhvrHia72ri
fetching features for id: 7p64JJtRFSwlOp07twxVZs
fetching features for id: 7Gpx2fNJiilvrf9Ss8qbit
fetching features for id: 216MJwr3jtriO8Y8c6zal6
fetching features for id: 62z2Gtm5Pmzecb2lvLp8PU
fetching features for id: 6Acz8htWPRE4fpk4bvAEr2
fetching features for id: 6FodKjIrgo6gIUQq8gcWGV
fetching features for id: 7BYxqnEMQodVTEBrraP7jo
fetching features for id: 57gvCnV4DqXskdbxv8z899
fetching features for id: 3DqzSyHtpCUIRBu7n5koBN
fetching features for id: 5hNgmiVNNQjgaoi7o5dwJR
fetching features for id: 6Jm93p3ISmnMDH3IlN3vxG
fetching features for id: 1fNR9tY52D1V79V5eNqxxx
fetching features for id: 26ecDeti5s5fPVyOft93hX
fetching features for id: 0qAtbrY5DNGHtAl1L0CQzS
fetching features for id: 3GK5ETquucGLxWBrzBY9nY
fetching features for id: 5MwAvp85J0t4PDqcQ0DLNQ
fetching features for id: 7EKMfvkFVCaCPNDgeWZDm8
fetching features for id: 5oizD47GLSKDlfCeSiEKJz
fetching features for id: 4TbMBJgbFWa0zAvrgLhUwy
fetching features for id: 10R2MFLIpSAtCjJdXUUsJY
fetching features for id: 2SUe50SsXKH97zPwx1pnuL
fetching features for id: 7CHmNiReghVLKbD032HHwz
fetching features for id: 6aVmTAiMtCXGCdsCvoYsSC
fetching features for id: 5eUVr9Ja4GK6e1lcIXjML7
fetching features for id: 79Mf2Y6Qm6ajJ1AeHouZxI
fetching features for id: 7axj2soLPxuhxJEYaUoYEg
fetching features for id: 2JuHkrWEmd7h2plB0mIBRW
fetching features for id: 5HSpa327ERZBmSr4Fmuyyd
fetching features for id: 6iTps24cT4G4yiKnQ9Z5pn
fetching features for id: 7sJMF0hrpfIxH4wO5TShMo
fetching features for id: 4VFgAvzQ5gKTLXUBZ1yd00
fetching features for id: 2J7WIxR3zdQGpJhS7W7QCI
fetching features for id: 2GqjJ42UPMXqBDmg2tr2nb
fetching features for id: 1jnbu1z52a8fmU3clOiVEG
fetching features for id: 4UD5hLK5vmDXgFRdhoEkBZ
fetching features for id: 0zZCYZgRwyhRGh5Emorbmx
fetching features for id: 0hUrQ65aXYHm2zEAlBVI6F
fetching features for id: 6gTT1GwvDp9OMakvXFvaj3
fetching features for id: 7HkFPQouWNbR5lpHf0PtSH
fetching features for id: 6uEuTHzvNheYW4Hj7HhPaJ
fetching features for id: 7rbvjRc6ZTVA3GUXDx3Xtr
fetching features for id: 69BitnFFEW0yrtbNQL4inL
fetching features for id: 3l477StNmG8dGEwGzFshxh
fetching features for id: 7vcJkzUTxuGvrs14PM6qSY
fetching features for id: 558Eq590tn37GM5CuSMt5I
fetching features for id: 1WwbO6D1l5GCgplARhJrEP
fetching features for id: 0p7mtSBkoZY14zR7QqUtid
fetching features for id: 5tayxTorhNcu0Ovz70VqOF
fetching features for id: 1PeRRbSQ2kSK0a3GAXxR9L
fetching features for id: 3E9UfnzfTjfvlKvqJjXoTJ
fetching features for id: 3VBvrFtAU0cLikjrseZLv0
fetching features for id: 20BJYonZzlLzd0cbhIED97
fetching features for id: 4dmHFU1lnFhn90jjE1ILfd
fetching features for id: 0aPiYc5UPHh4jwMPNaLugx
fetching features for id: 1QdxAlq7M0dW9Kpu7LQbgx
fetching features for id: 3xDafKq6lG6TJ5m0cUBgtt
fetching features for id: 4xHlxSvdAE6o91oeAvXOfS
fetching features for id: 4tFENy9kf1zjzRn00eI0I3
fetching features for id: 1GIlSd95YSEUzueIjZ6Y8G
fetching features for id: 3kvyaShezE8dMWfbL44TdI
fetching features for id: 7N29YCbJqmuQzspqfEebXr
fetching features for id: 519hCSC8d6zu3k45ESSVqk
fetching features for id: 59NHKjzLapyxuTpOBQQh2s
fetching features for id: 5vTp0vGLx8SJgqxVLnTHtK
fetching features for id: 1YaVBWSwCVxfo8nmc2OQjW
fetching features for id: 3jqJsdqZsSzGXuCYoeSss3
fetching features for id: 2ib1IMnHzjMtB7k6WkMcGv
fetching features for id: 2It5KW85y1U0G4qlt4Lx1D
fetching features for id: 5FiTi67Q8krrT0gBM7X62V
fetching features for id: 6OmgGmYVAF1qj0rJj4MooU
fetching features for id: 2aJFklWpbkfYTnXTWSpMPE
fetching features for id: 5QEJbqvNNkFWzyF1l8d2Ci
fetching features for id: 7uXktLVq43HeS4fPU4nQna
fetching features for id: 0ULyHRmncqiUWTEIwNvBIx
fetching features for id: 1OnzfZTZGZPjayArD35MFZ
fetching features for id: 7edwuDSs8CFv50fUIaz49i
fetching features for id: 5QZyGPDlIyk75zl8sOOXbh
fetching features for id: 5ipVuHZ3mQ8MfcejT5GWTr
fetching features for id: 1H3v333p3oy1mI6zRS5C05
fetching features for id: 6JEpJ4tOBsfgMZbHtzW0Fq
fetching features for id: 6aW0GTIAQcHdPxQczS0rG4
fetching features for id: 5bfWbTxkXYA1ASKShrkxJx
fetching features for id: 7c2XSHiN5gOCPiVefDGq0R
fetching features for id: 1ZUnmb11KsOyNHsx0AvZQT
fetching features for id: 70YloubK86R8XZ7XVQOuXF
fetching features for id: 00Vwp9jQUs52JOnbbLaz5e
fetching features for id: 6HW8fAuEDFLfhJhVzqvd4K
fetching features for id: 1oLHbBjM1BeeBTwKHxJ6F2
fetching features for id: 7MOomqWm8gv2WJprpiuE7b
fetching features for id: 6mCPGlWSH6hDQucRmSfUFq
fetching features for id: 7EumbJIbcVSyJSYKc4aVsT
fetching features for id: 2ZoIGwdHUOILIsI2ZUeC9l
fetching features for id: 5UHxEcSaG7vwVOSZlM16Dc
fetching features for id: 5sUTy0TDw5JCwQr8TCWcG3
fetching features for id: 391TUcoPonqYykPkSZ5Z9U
fetching features for id: 0DgdBjcAGfwbI4OiARr2kE
fetching features for id: 06T4nDgHdNhGMNePPtfcki
fetching features for id: 04DYwFIKeq2Bkn9aqSI9PC
fetching features for id: 56T9QL3AHmckfvWkUOjGk4
fetching features for id: 4mHwuovUFonfDDdAaCP9Rg
fetching features for id: 1wX7yflolAYDRHnIkiW15b
fetching features for id: 4RtUXIA2fQOMhuNdWsGUEv
fetching features for id: 2Xt3YSFvqe5AzRbVrETVbv
fetching features for id: 7d5DaXxZVLC2aww0lDSQOs
fetching features for id: 3R8WrZ6UsjLpxOl6jXWRIu
fetching features for id: 47mA6f44zxLtdATOoY7GjN
fetching features for id: 6cRsRBzSaGwfnMMqNSjtFY
fetching features for id: 4YDjNlJUlrwKaSCBtKmHX0
fetching features for id: 5CZaYT4b735LHmFlKKHh8f
fetching features for id: 56ZVFMZT1OJK8h5fnsk0Yp
fetching features for id: 4m3Q6C3o1jYamTikYAm1CI
fetching features for id: 7FoaiFjaAwad3JIi2oki7z
fetching features for id: 5OWcriZOmfyk6RMI0QxeFg
fetching features for id: 6cUIm6LO1lcMfDlBeGa4H4
fetching features for id: 7LRlsrrX3IT7nyLUz1fT94
fetching features for id: 26qG4DXBnaBgke7u2t0qvz
fetching features for id: 6x8EqpeM8XzeoEKfatpSYP
fetching features for id: 2TqZC7BpPlwEHbMGWnjXf8
fetching features for id: 0dq6oBqqc02ulVK62U668q
fetching features for id: 4eDmOSU6rMzjIim9FsDtHY
fetching features for id: 7qinPLZ41lgIGuT40EAFs0
fetching features for id: 2NH3RgooS3Am8LcS5m8dwW
fetching features for id: 2hP6oI5fZAta0kj8nYn5CK
fetching features for id: 7CUN42trHz1jiAooYUW1GC
fetching features for id: 6lRbAvHGoMS0vT4siAaMSk
fetching features for id: 5WzVjwYFWbT5jUsGAnXUPh
fetching features for id: 3Ec0t5uhQ6DK9m3kPIUOkw
fetching features for id: 1gezSbknNFRysgyTf1paTV
fetching features for id: 3FM1yG6lv8Yy9LGwfWearT
fetching features for id: 1CyZ8WyY72ByCIFDXdCdaq
fetching features for id: 7v1858htfU0srTDwhxeka8
fetching features for id: 7l9RNVHHVgG7mMqnRoEcjL
fetching features for id: 2HWaoy5exkRIulp61fOvQj
fetching features for id: 1t3QVqfiWHIb5YZwgrj4LN
fetching features for id: 1XunTmhOcj3xwh4b8P3isX
fetching features for id: 1Eo6eH8Yppywb6nkWDhB1b
fetching features for id: 54UFvpOyoqlUu7N09UymMz
fetching features for id: 6P18UDWGqrRAtQpMvxQ5t6
fetching features for id: 42VGtwRV739rKlcSePEpd4
fetching features for id: 4L6MvAch3m0KnIzMUh5RAG
fetching features for id: 5BwSsmZPm2T7mqMnlGt6L6
fetching features for id: 7HdpSJmQ0b2qIV1MfkaPpM
fetching features for id: 4w8QvjM1Nr344937eMBrgn
fetching features for id: 7yLlSCB0KabT0sy0JqRRzU
fetching features for id: 3GsZ363BCLrRYTl5GKcShC
fetching features for id: 5C9ZW69WIoyEGg7QGJaIWI
fetching features for id: 7L44WJ4w3ngx2kH2hzdSdG
fetching features for id: 2pAc35JdZuZHHOAqGqXJyX
fetching features for id: 2FQcPtw6VJ8t2D8YC6OiCC
fetching features for id: 7IIfi6udaIkFvw4z3xUeub
fetching features for id: 7CdovAv39pva6nxPvxqwJg
fetching features for id: 5foJJKZXkVdAp5XBYPR5Uw
fetching features for id: 5lp5hSm3LhWTsN8Muqehry
fetching features for id: 46v2DKi9gdm638SixadxLd
fetching features for id: 18YPNs20cBfgqkY8YCJlOH
fetching features for id: 2B4IYgIkBnemrp0YEkaUIH
fetching features for id: 33ecRNaSLc7w7TVMRDgrQ3
fetching features for id: 1qj9BaE3Odlp78GDtDaHQJ
fetching features for id: 59UMLADL1NI7IY44H8aNIp
fetching features for id: 53jsT1bT1dRXQQniKhWJqc
fetching features for id: 6aTY7Dx6719A0rTsYY7NgK
fetching features for id: 1sU1lBFXKG9TT5oU1UmTiE
fetching features for id: 12zEAgwHjIQRYNiAAH5UiZ
fetching features for id: 7F7GiHeIgnI9TvMjbk21iH
fetching features for id: 2oGseypps0Vb50DYozqMcb
fetching features for id: 626zdy6z4Y199W0UgVHYva
fetching features for id: 1clawHbnjJhCFGeBbYI2Yy
fetching features for id: 7hpa3Bcjt8Or7tC7TtOIcs
fetching features for id: 2UdOyotmZbAILykyQdy8R9
fetching features for id: 4kcTzSegbAtWgMQyNEWOMA
fetching features for id: 0q5dt70CzhrvCvFziLrYah
fetching features for id: 0mHeCqDhYLvvP3lH8qWIWC
fetching features for id: 1Tx2BAn2UEw1O2qn5Bc8l8
fetching features for id: 72jyKeBzD8UHh52loubHKm
fetching features for id: 4LjtDN131EpaPRzxBftJ9z
fetching features for id: 6UXvWxLEiR3LJ5PtAAOpN6
fetching features for id: 4MszoBMW2yFxHFEHmjXmiT
fetching features for id: 0VMbqYoFBmCr3DINNPBFFF
fetching features for id: 1NAvZNIUfC79QACBJ7qhK0
fetching features for id: 4uOiETgQvxwCjxTXfU9q1H
fetching features for id: 2FvpvjHX32OLvendFImN9N
fetching features for id: 5dOC0aI0j54SJKLZJTQsvH
fetching features for id: 4lrtR5XurtYh5GOptKtmyI
fetching features for id: 2tRs0XGkvtH4youYAGevPS
fetching features for id: 5y5Hbumds0xwLEp1Xahne2
fetching features for id: 1fMjmMwmqRR8C2MFvDM7eA
fetching features for id: 3GH2K6Q555WwVemv01pBX5
fetching features for id: 5ZPv7iC9p3qaTgaFbouI3O
fetching features for id: 7jIEptSoRxLTp0LJAwovay
fetching features for id: 7KfwGraITe6aLd827Wq0bh
fetching features for id: 1QyuWKLRd64Mpn9GesCxmZ
fetching features for id: 5MXe6dKcR6iO33GWzVW8cr
fetching features for id: 6xp507xP2TBK2rUZkxBB5O
fetching features for id: 3d4JtDwvj5DPtz2B9VSIAx
fetching features for id: 5X2w0jQ9jHaYSkqKOKqtH3
fetching features for id: 6GsCmMNA4xepZ5V4IM8iLl
fetching features for id: 013tlTXGKvrYjMh1cz6ZVK
fetching features for id: 12EYGS2kIMDFBzd7mGFDwu
fetching features for id: 69cikKSonWdugzCyc38K35
fetching features for id: 4MexmY7hcWQB9tNfbnDBIm
fetching features for id: 7z2ahT0htzgYLlq0wmxHdR
fetching features for id: 1ugc1PHB2qbkcuOaGiXXIu
fetching features for id: 39UXaEcfOP4Yii2iCej2zE
fetching features for id: 76tUNmN89oKHpNc0SsXdfU
fetching features for id: 6Gv10QCi8aaEdBlpXrpzy8
fetching features for id: 03M7HHQXR9Tz0Fb3o0FKSQ
fetching features for id: 2cvztQuBIxwV38kg1Ydaww
fetching features for id: 7JhN1mVlxu5dZzE9QsP0NS
fetching features for id: 6V76ONUZMavBZa4jLSZHd8
fetching features for id: 3p12NWIZxrdj4wgzYzcjLl
fetching features for id: 5zSaNRRrhmn60aZFGYa76b
fetching features for id: 7yuSXXGInvV9gVvamnpdbX
fetching features for id: 0GRML5qVYv4JSzjvQWYIxK
fetching features for id: 5apwLmnzpwqnFffc2RSZhC
fetching features for id: 1PhAHwHFnCVMgIl1ObdkNN
fetching features for id: 1swkskqEBEyaplF10RuzGw
fetching features for id: 3moXW7IxJzjMMJDQt7Ja5i
fetching features for id: 6VdJa3dGUSw7Ju141CYTJv
fetching features for id: 12JcJfzNkD2IOsp0MazlNY
fetching features for id: 7koZFBqxHXUqudMHxNTrb2
fetching features for id: 0DpjGs3EbsI1TKiYSSeTRF
fetching features for id: 60UaybLNirtwinCRJfhfX0
fetching features for id: 2AURcFuFFPLEfzCKzCGYP0
fetching features for id: 4FAXiLNSi3qGJA5zcX9PiZ
fetching features for id: 1mzAgAg2Bd4AMwphrbIIMv
fetching features for id: 18ARvnm1orP4B2JMqIsVpt
fetching features for id: 253qAom54xbJSNlq9pGzAC
fetching features for id: 0suTtYLoNOywMP2pj1fL0X
fetching features for id: 1mVw4zs1czeQwo1MueFGr1
fetching features for id: 4EUL6WeHpGltSOBSQwDWNA
fetching features for id: 5YfbxqUoLKpIxQlxaOGXnq
fetching features for id: 0RT3CrrdFJLxDnTMzv2qCt
fetching features for id: 0wacMQp7uVCSV7WVipU1yK
fetching features for id: 5vnlLrx6Be4mdJETZAPID1
fetching features for id: 75wzEkDy6NnSj692dA99ip
fetching features for id: 3Uw68N09eLlFFwlsVqupCM
fetching features for id: 0maFYFOeM4ut7J0yz77RJd
fetching features for id: 5KaJHgTXG5GIkYB5Pjh1cf
fetching features for id: 2g2GkH3vZHk4lWzBjgQ6nY
fetching features for id: 3Z3DNF0peaoigwGVnTYEw6
fetching features for id: 4yzLxHEO5nPKwxjGYLF5xI
fetching features for id: 7GUyfcCytZiqY3rpzTI0ms
fetching features for id: 4PZ1es22XnUyLAM4GsCIlh
fetching features for id: 75zdGixP4G3VQ5oIk1HzjU
fetching features for id: 1WQxjJWQTUQdugmysoANRw
fetching features for id: 0OFX1hwVTJRLkpH0nDykat
fetching features for id: 249dLqv8lchzJqcHD6cXJK
fetching features for id: 5UrMX4RgKcJS7vOScBHx9D
fetching features for id: 5noWD027Ap82HrdLi2X418
fetching features for id: 1hYcTkSpxl7gd11PazZCWY
fetching features for id: 4HCA1iiTZvcDFSHZPmolQ9
fetching features for id: 6jZxxO0ZNb8fakRXryav4y
fetching features for id: 6SI0GacLVtXOgvCSKpaXqo
fetching features for id: 2RUfmM445DIR4xDwA2vSsh
fetching features for id: 7FOHypk9d7fS6k3KTBnLvj
fetching features for id: 2V0U78Xx4KY843bJbuq586
fetching features for id: 1YR24sA6nsntEjaYhoj6QW
fetching features for id: 34OBWivSH17yViSNUBk9FA
fetching features for id: 6geXF727GRr6ht9lNlzqCm
fetching features for id: 5uqWLZmO5nI7nDfoT4Fk9x
fetching features for id: 6OhEgsUJBmkEN315GC3ljV
fetching features for id: 6lDX5Uhl1It298l8hhQpBr
fetching features for id: 5ekabwI9PFHbfc0hz6n1xi
fetching features for id: 0upIcv9YLNXYr7UboPfIed
fetching features for id: 73sNfgGtfIJ6orD0ef66Vz
fetching features for id: 776DYateBNbhanoeKGIIq3
fetching features for id: 4eXw75zmi8x2Rz27tFyiHL
fetching features for id: 4OgCE8bmGO7eatG3OuBEju
fetching features for id: 0oI0ekrSQ3aHiHcMi1B2gK
fetching features for id: 0GAXsJkYFTXhw6Rs3QWFBq
fetching features for id: 1FGGo2R7TyRla9QlV2GBHQ
fetching features for id: 7iFteeFOnUMJ3vL46CbehE
fetching features for id: 3zNoELfZro8JyANvlxkm5z
fetching features for id: 6idhXzbOlJ0Qbr0FioGFGA
fetching features for id: 3ManMR2GV4W6xD9KQnDZHe
fetching features for id: 7LAsDdinXejDwodvelVVTa
fetching features for id: 0HHYIJBKj6GFchyYjJQ8nf
fetching features for id: 4EdYYjkGMqsyNwC4rnQBfr
fetching features for id: 7sq3da2yf4tSAIMNpMbTdJ
fetching features for id: 2bHXXAgNA9MMvHE2e6Dgay
fetching features for id: 7byy0qP1b25lupcuUYNCOY
fetching features for id: 3ClUZFkkKgXjp3XERlQFdd
fetching features for id: 6jQu1bSmE4loFJNQs7Nnh7
fetching features for id: 0plxbPQf0Y4KQT0wFAFN95
fetching features for id: 6IHmLFjzblJbNswndxldtp
fetching features for id: 39tQwlllxM5cV0tz8xdK0x
fetching features for id: 0gssZyDIxrn2CsokEML0xq
fetching features for id: 0X3DI3v2RXnOa7JbYolPlR
fetching features for id: 5yzjgCa5wAgyjP0yuXCvv7
fetching features for id: 2BWgyAMj5u6eYH4JfbcKYE
fetching features for id: 6e6B0cQW9FM5JqqIxqXgEm
fetching features for id: 3TjvbvtdWiNF1M3BcoEDat
fetching features for id: 4jfnYFsHmSD7yVR8Im3c2m
fetching features for id: 26fsOSSq0SYeDuSdLLbftH
fetching features for id: 2UJ55v5EICM8T3wekSJx30
fetching features for id: 6HdQf3qpj3Gh1ZVZB8FnCc
fetching features for id: 6bHEL0tDZEVZ89i8CDAEJE
fetching features for id: 6jslRzC9teP8ls7A1tUMZo
fetching features for id: 0aVMQFFqVPpbvoykIABQWO
fetching features for id: 7IS9MwiLJp91PEyoUDazqb
fetching features for id: 75UsaTIKbeJ5f3FnyEHL86
fetching features for id: 55BzdHoOUEzvFT7zgujFmH
fetching features for id: 3NLX1PKEXHWSJoa20PJ0EB
fetching features for id: 5qliF3aPblospVoSMI1o1U
fetching features for id: 0vrS4oA5PHHsVEjVa1pqo8
fetching features for id: 6JiF18wqpOTkQ2o4jFfrRH
fetching features for id: 24oaJWZlGGsSVMThy9YiBc
fetching features for id: 7dDE59NX0n466e705E8Itz
fetching features for id: 4yLBMZrf44kLJh808FpI3c
fetching features for id: 07WmDyqnKQ1lwDNK3vqa6s
fetching features for id: 1vugXqO8K4p9dZPAwfmyFS
fetching features for id: 5ygqNg50ux6hm9YEbkriWS
fetching features for id: 2Bj8nFSe3yy6myhOonbHCw
fetching features for id: 3ADlu2SizmXRt4xwGURdY1
fetching features for id: 7C8DO2GNmZ8sT03iQKOBvx
fetching features for id: 5A0YCUqVTDycmhGIZ5WH3Z
fetching features for id: 5htJrzx9BOWrWWuTFRbehY
fetching features for id: 0AQquaENerGps8BQmbPw14
fetching features for id: 7KjsPUMEiwqkAPm8G5hjrA
fetching features for id: 6OJZM84zgpRxRYtBk979xG
fetching features for id: 5Z9xikDEHaSTJPNItzTimt
fetching features for id: 0giG9rN3UBRB0qyM6MWOU3
fetching features for id: 4HPeKn4z8n1y1Rx88kIsBz
fetching features for id: 46HWAi2wZzWQaRuSuX7Kdu
fetching features for id: 4nvOMKH3EpGv64srvXI7lp
fetching features for id: 07fZuhOPreLVpwuLkTCRLH
fetching features for id: 6uIXlGfDTaToiaHoMIhWBm
fetching features for id: 657T90Nnp6WpwhZ8oyBCiU
fetching features for id: 5h1bCuHF8fASXSfIAHfkZz
fetching features for id: 2zBwGqDcl3zF8MVDtKSKmL
fetching features for id: 3uHrMzdYjAUbINHGTh7M5S
fetching features for id: 0ZqhPLdc7yEv6iADB7no3F
fetching features for id: 5SZxPtk6jheFFwp3ziaih7
fetching features for id: 57SIMj9BOvVcPKuPT54Vpr
fetching features for id: 5imnLE0ZCI4HIqJ8hv1Pb9
fetching features for id: 4lbUp0TQ6niHxckeazETZL
fetching features for id: 6GN0LtafXeYC80VbaCWG4b
fetching features for id: 6bjT2fsUi1pOc9gk1mB9w7
fetching features for id: 5oO9f7KhTM8qo1fJ92Z3sC
fetching features for id: 2K9dRneu5EzYZ2Fcv7SEM1
fetching features for id: 4vBDAxKllafwCMDWD76atv
fetching features for id: 7upcKagl9OmfV6ETI6dH8S
fetching features for id: 6Teu9cfIt31Y9xQtkUPUQj
fetching features for id: 4gJfiNBHobaCb52NuYFmw9
fetching features for id: 2MWxpIwgwY4wc4tFs6NyhS
fetching features for id: 3XToK0X499i9odkrDCG4FL
fetching features for id: 3Am7HbnChqAmGexDhhO2Bw
fetching features for id: 4VfHt3eRQcXo50kWdtdcXV
fetching features for id: 3G7spgNysEke1Fr8ZYYbwg
fetching features for id: 6aM09078f3E1jpRGc9tuLE
fetching features for id: 3KzgdYUlqV6TOG7JCmx2Wg
fetching features for id: 02IsRDM3VwxibJTEeloHG9
fetching features for id: 7moaXjSX1RHksmk6qanoPY
fetching features for id: 36NPEs4S7ik50NrlzaqoIJ
fetching features for id: 1RYznli2VNO7FCbW1Hq4KM
fetching features for id: 1R4kAzLGI0PQBe32NAGP40
fetching features for id: 429ZSPvrxPnhcF9b1WdNdk
fetching features for id: 4aLLhVxmXDMsjNHUvDKjuw
fetching features for id: 6uQlc0WC4jGIJhIKCIfHJo
fetching features for id: 4YmJXCXCkLorPkYiSPFUN3
fetching features for id: 1ZsHsqO3nrafksXkOBeUXG
fetching features for id: 6s4b3H4BrUCfskxYinSSA8
fetching features for id: 5z0lOvhjDBD1doNvVUHdXw
fetching features for id: 6JHWHuirGmD0x2PKINh0v8
fetching features for id: 1h3IWntF2SMKv8cvrPgl91
fetching features for id: 2Wg92bsfweigVksgH5FtkJ
fetching features for id: 4Ta3ldW6fXAO2PVbvoXU6J
fetching features for id: 4f8hBeMXMvssn6HtFAtblo
fetching features for id: 3kpuidZaDOsLP3YgFi8sxu
fetching features for id: 6t60WZiWBwEcSm7yZYwauK
fetching features for id: 2gWAdYak6pGXWDG9phdesr
fetching features for id: 2uTP2SyKzocaFtCdcwf9GF
fetching features for id: 6UO3m5VERw0IOI0fCXwcvJ
fetching features for id: 38rz74MI3lV25ZfhABoQv9
fetching features for id: 19i5F6KdCVpClxFHvIwNa3
fetching features for id: 1xYZbzKH9rRC2x0tiOVgB7
fetching features for id: 2MnlN2UropWELLdYv4Pj3c
fetching features for id: 15mwNCoUIJJ1dvtNeSbY7J
fetching features for id: 44k5V6EswuTCPYVvLveN3C
fetching features for id: 7vu51HpremcwWKhBhVccDB
fetching features for id: 3C1d2Sjd1LDqFaLVkzsLbW
fetching features for id: 7yYLYKASE9MHmL4n51vAtC
fetching features for id: 0tgVYYVcMbfZcHBZ1gF8OX
fetching features for id: 2p7wFvRxQ4GLgVB5b6mr6U
fetching features for id: 0crCWOYbMWjlC4pnbMqSyV
fetching features for id: 4T1YDePBqGXRnTJMCi4nXm
fetching features for id: 0CRxGsJrwOMQUuTr4yT5mX
fetching features for id: 37Au0ObjCx4KD76YBe4Y9g
fetching features for id: 58rE0XBpWlcupuTU2nso8j
fetching features for id: 076MbdGIiRAfHrGuXXPmkz
fetching features for id: 3pQQNCx3E2k41oEWJje38o
fetching features for id: 7lNtHO9IJFQJIVyahuZ7hY
fetching features for id: 6aJI1JpnsJ85NjV8rm5IAt
fetching features for id: 7vF9CfYTUiif1wHSFdrTyN
fetching features for id: 6nq7oPBl9iju4FAd6Rt7QR
fetching features for id: 1hi6oZWC8g3dBaUTEj5zDG
fetching features for id: 4W7ps5zORgRWe691hlxXzC
fetching features for id: 1tM2BQzEMJ5f9siQ9ZjM6P
fetching features for id: 5wwKRbqWdHVNNkJOIxXWMf
fetching features for id: 2C69yvwxsoQEti1GFx9MHj
fetching features for id: 5RyzUKTwaiSI2A8ZugBIzn
fetching features for id: 6izmQUGcOH6W8A7Z30qygh
fetching features for id: 5opP1p1Ulc1Q6Sx5pZ3eYy
fetching features for id: 0BJHzqk1agz54Q3YzvcRez
fetching features for id: 5ssXU6u9HqUW87W2gglY7F
fetching features for id: 1iu7A7vOv1andAcWxcIPPF
fetching features for id: 24fo7tdfOXJ6mlRl8PMup8
fetching features for id: 3KC4PSP9TGhz79XCPYTNoS
fetching features for id: 1M24NmdiAvtOCxkqyfVFdJ
fetching features for id: 3GKYmZUBoj3mD2SBUmnAHB
fetching features for id: 2kV7S1nOw4Jfo8rapygdmd
fetching features for id: 3E5ndyOfO6vFDEIE42HA8o
fetching features for id: 5HVBoRtqnH8ucSm5KFdGYG
fetching features for id: 1T5SRm5reEOzwZHrQ6dYFR
fetching features for id: 2LEgF8sPiwfaxVyAFWHtzK
fetching features for id: 2F4Th5TUzfOIGH3AgJprbj
fetching features for id: 3MMSrLLxgZ63dljiQeD5Hq
fetching features for id: 10cjs3lGQ2lDfD1mKkduhE
fetching features for id: 3pEwXiu1AVstyAluolJnW7
fetching features for id: 6i9vakbAAPjn7qEUv3BNos
fetching features for id: 3mc4hySxLwW78CIR1R89dg
fetching features for id: 2jfyf0Yspx4fQfP3FtDr50
fetching features for id: 1ndoZF7C8Ada2SSNxBEsMj
fetching features for id: 1rH9Pgv6D3qvZcRjlTNyUS
fetching features for id: 6yeJW5uiuqb3rbUduZ5UBG
fetching features for id: 5ycXdeVKHGBiGLpdkUtl15
fetching features for id: 5zvOXJrzzUlvXwyuwZ0toZ
fetching features for id: 2UJENcUk6T8kICFcuFtWsA
fetching features for id: 7oLItNFdvAwu8wwMohOvZH
fetching features for id: 5O11gVu4Sm3B7yjQ2rELoY
fetching features for id: 1BogZ2vCN97dA7dYvTYeou
fetching features for id: 2OZel3ZBLmTT1bZBWOZ8fS
fetching features for id: 3QMloujG5pu38kQxiQxVxE
fetching features for id: 43fPIcXcSaEAOaHywu4nsW
fetching features for id: 6Gwqyk0JyoMo0oVHwX7eKG
fetching features for id: 6nCRUa8xPE59AyOfZBr3Qt
fetching features for id: 2MfIhWJD9lTXKTolmYdXCP
fetching features for id: 3UCHPG6VkGQzVXm69rd6f6
fetching features for id: 1hnTt2qyVYnxUfmCEdDUSb
fetching features for id: 5UNgEkAGHeLHvAVNdi4okj
fetching features for id: 1IV8T9xWxJc0tSEFJit25F
fetching features for id: 4uxsv9PjV3Yeyn51RdWvGJ
fetching features for id: 3GCrbnKJ2f9276Kx4KD5Y7
fetching features for id: 3ConsOVpS2L6rejAUcljVS
fetching features for id: 5pV0BHdDqxKnBrxPxjnS10
fetching features for id: 4hFhb8YNgPc94Fq0NANpIB
fetching features for id: 0fFY4JvHfSkChtwlGNtE38
fetching features for id: 3zeTHjRg21dMPBzAUW3Vve
fetching features for id: 4BOp0Ddc2K357ok52GC4zn
fetching features for id: 5OA3zJIbQytfulfE9tXEX1
fetching features for id: 1srKo8pRCDoPMIuLycsaQ2
fetching features for id: 6KceGfo54oVMtf7mc2m9DM
fetching features for id: 6PdbcsYRKKA8FEo91qPRqF
fetching features for id: 17tMfES3QZ4eu5gmxaDKsx
fetching features for id: 2mfo3AK0aZzTGcXD0LnLqx
fetching features for id: 4W6VKRCjQw8tFRU8pu3ExW
fetching features for id: 3yaDHPIsjORc43AdGbYvrG
fetching features for id: 1mMSH14XSSjzLvNuf62azx
fetching features for id: 2Zl4RbIpuFSLLdjDznEVU2
fetching features for id: 2bvKFIpuoWJprEiMdu0TU8
fetching features for id: 3QqafgZ1FJVKOA0Nl0yKxm
fetching features for id: 4U3S2BgQeX25uW0fEPDSSt
fetching features for id: 2oiLl1u7qg6LZZA595Ok4d
fetching features for id: 6voBht3aVPwQWiwSP2fEAe
Esecuzione completata in 1191.7362 secondi
</code>
#Insert audio features into the Billboard dataset_____no_output_____
<code>
num_datapoints = np.array(output).shape[0]
output = np.array(output).reshape((num_datapoints,14))_____no_output_____# create a backup of df_billboard
df_billboard_bak = df_billboard.copy()
# filter the Billboard dataset, keeping only the ids in the 'ids_new' array, i.e. those not already present in the main dataset
df_billboard = df_billboard[df_billboard.id.isin(ids_new)]_____no_output_____to_insert = ['danceability',
'energy',
'key',
'loudness',
'mode',
'speechiness',
'acousticness',
'instrumentalness',
'liveness',
'valence',
'tempo',
'duration_ms',
'release_date',
'explicit']
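# note: insert() is called with loc=4 for every feature, so the inserted columns end up in reverse order of the 'to_insert' list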
for i, col in enumerate(output.T):
    df_billboard.insert(4, to_insert[i], col)_____no_output_____# convert the 'release_date' column to datetime
df_billboard.release_date = pd.to_datetime(df_billboard.release_date,format="%Y-%m-%d",exact=False)_____no_output_____# insert 'year' column
year = df_billboard['release_date'].apply(lambda x: int(x.year))
df_billboard.insert(6, 'year', year)_____no_output_____# insert 'popularity' column --> note: initialized to 0 because it will be removed later
df_billboard.insert(17, 'popularity', np.zeros(df_billboard.shape[0]))_____no_output_____# insert 'hit' column
hit = np.ones(df_billboard.shape[0])
df_billboard.insert(3, 'hit', hit)
df_billboard.hit = df_billboard.hit.apply(int)_____no_output_____df_billboard.head()_____no_output_____
</code>
#Export_____no_output_____
<code>
# export to Google Drive
from google.colab import drive
# mounts the google drive to Colab Notebook
drive.mount('/content/drive',force_remount=True)
df_billboard.to_csv('/content/drive/My Drive/Colab Notebooks/datasets/billboard+features_3.csv')Mounted at /content/drive
</code>
| {
"repository": "isacco-v/hit-song-prediction",
"path": "dataset_integration_3.ipynb",
"matched_keywords": [
"STAR",
"biology"
],
"stars": null,
"size": 551753,
"hexsha": "d0bf2273f348618757c525a4ecc2e2c974d60c97",
"max_line_length": 551753,
"avg_line_length": 551753,
"alphanum_fraction": 0.7511440808
} |
# Notebook from J-81/ProteinResearch_TechManuals
Path: notebooks/Common_Structure_Analysis_Tasks.ipynb
# Contents
## Working with Structures
1. TBA : [Downloading-PDB-Structures](#"Downloading-PDB-Structures")
1. TBA : [Calculating-Minimum-Alpha-Carbon-Distances](#"Calculating-Minimum-Alpha-Carbon-Distances")
### Note: Most of this is distilled from the Biopython Documentation [link](http://biopython.org/DIST/docs/tutorial/Tutorial.html)_____no_output_____
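Since both entries above are still TBA, here is a minimal illustrative sketch of how they could look with Biopython's `Bio.PDB` module. This is an assumption-based example, not content from the original manual; the PDB entry `1TUP`, the chain ids `A`/`B` and the download directory are placeholders only._____no_output_____
<code>
# Hypothetical sketch: download a PDB structure and compute the minimum
# alpha-carbon (CA) distance between two chains with Biopython.
from Bio.PDB import PDBList, PDBParser

# 1. Download a structure from the PDB (saved as e.g. ./pdb1tup.ent)
pdb_path = PDBList().retrieve_pdb_file("1TUP", pdir=".", file_format="pdb")

# 2. Parse it and collect the CA atoms of two chains
structure = PDBParser(QUIET=True).get_structure("1TUP", pdb_path)
model = structure[0]

def ca_atoms(chain):
    # keep only residues that actually have an alpha carbon
    return [residue["CA"] for residue in chain if "CA" in residue]

# Subtracting two Biopython atoms returns their distance in Angstrom
min_dist = min(a - b for a in ca_atoms(model["A"]) for b in ca_atoms(model["B"]))
print(f"Minimum CA-CA distance between chains A and B: {min_dist:.2f} Å")_____no_output_____
</code>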
| {
"repository": "J-81/ProteinResearch_TechManuals",
"path": "notebooks/Common_Structure_Analysis_Tasks.ipynb",
"matched_keywords": [
"BioPython"
],
"stars": null,
"size": 1015,
"hexsha": "d0bf99f9c4e31e128ea66bcff563bf0794b3369b",
"max_line_length": 136,
"avg_line_length": 22.0652173913,
"alphanum_fraction": 0.5665024631
} |
# Notebook from jonasvdd/DS-python-data-analysis
Path: _solved/case2_observations_analysis.ipynb
<p><font size="6"><b> CASE - Observation data - analysis</b></font></p>
> *© 2021, Joris Van den Bossche and Stijn Van Hoey (<mailto:[email protected]>, <mailto:[email protected]>). Licensed under [CC BY 4.0 Creative Commons](http://creativecommons.org/licenses/by/4.0/)*
---_____no_output_____
<code>
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
plt.style.use('seaborn-whitegrid')_____no_output_____
</code>
## 1. Reading in the enriched observations data_____no_output_____<div class="alert alert-success">
**EXERCISE**
- Read in the `survey_data_completed.csv` file and save the resulting `DataFrame` as variable `survey_data_processed` (if you did not complete the previous notebook, a version of the csv file is available in the `data` folder).
- Interpret the 'eventDate' column directly as python `datetime` objects and make sure the 'occurrenceID' column is used as the index of the resulting DataFrame (both can be done at once when reading the csv file using parameters of the `read_csv` function)
- Inspect the first five rows of the DataFrame and the data types of each of the data columns. Verify that the 'eventDate' indeed has a datetime data type.
<details><summary>Hints</summary>
- All read functions in Pandas start with `pd.read_...`.
- To check the documentation of a function, use the keystroke combination of SHIFT + TAB when the cursor is on the function.
- Remember `.head()` and `.info()`?
</details>
</div>_____no_output_____
<code>
survey_data_processed = pd.read_csv("data/survey_data_completed.csv",
parse_dates=['eventDate'], index_col="occurrenceID")_____no_output_____survey_data_processed.head()_____no_output_____survey_data_processed.info()<class 'pandas.core.frame.DataFrame'>
Int64Index: 35550 entries, 1 to 35550
Data columns (total 19 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 verbatimLocality 35550 non-null int64
1 verbatimSex 33042 non-null object
2 wgt 32283 non-null float64
3 datasetName 35550 non-null object
4 sex 33041 non-null object
5 eventDate 35550 non-null datetime64[ns]
6 decimalLongitude 35550 non-null float64
7 decimalLatitude 35550 non-null float64
8 genus 33535 non-null object
9 species 33535 non-null object
10 taxa 33535 non-null object
11 name 33535 non-null object
12 class 33448 non-null object
13 kingdom 33448 non-null object
14 order 33448 non-null object
15 phylum 33448 non-null object
16 scientificName 33448 non-null object
17 status 33448 non-null object
18 usageKey 33448 non-null float64
dtypes: datetime64[ns](1), float64(4), int64(1), object(13)
memory usage: 5.4+ MB
</code>
## 2. Tackle missing values (NaN) and duplicate values_____no_output_____See [pandas_08_missing_values.ipynb](pandas_08_missing_values.ipynb) for an overview of functionality to work with missing values; a short sketch of the most commonly used methods follows the first exercise below._____no_output_____<div class="alert alert-success">
**EXERCISE**
How many records in the data set have no information about the `species`? Use the `isna()` method to find out.
<details><summary>Hints</summary>
- Do NOT use `survey_data_processed['species'] == np.nan`, but use the available method `isna()` to check if a value is NaN
- The result of an (element-wise) condition returns a set of True/False values, corresponding to 1/0 values. The number of True values equals the sum.
</details>_____no_output_____
<code>
survey_data_processed['species'].isna().sum()_____no_output_____
</code>
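As a quick reference for the missing-value functionality mentioned at the start of this section, here is a minimal sketch on a hypothetical toy DataFrame (illustrative only, not part of the original exercises):_____no_output_____
<code>
# Illustrative only: the main missing-value tools used in the exercises below
import numpy as np
import pandas as pd

toy = pd.DataFrame({"species": ["DM", None, "DO"], "wgt": [40.0, np.nan, 25.0]})

toy.isna().sum()             # number of NaN values per column
toy.notna()                  # element-wise inverse of isna()
toy.dropna()                 # drop rows containing a NaN in any column
toy.dropna(subset=["wgt"])   # drop rows only when 'wgt' is NaN
toy.fillna({"wgt": 0.0})     # replace NaN values with a fixed value_____no_output_____
</code>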
<div class="alert alert-success">
**EXERCISE**
How many duplicate records are present in the dataset? Use the method `duplicated()` to check if a row is a duplicate.
<details><summary>Hints</summary>
- The result of an (element-wise) condition returns a set of True/False values, corresponding to 1/0 values. The number of True values equals the sum.
</details>_____no_output_____
<code>
survey_data_processed.duplicated().sum()_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- Select all duplicate data by filtering the `observations` data and assign the result to a new variable `duplicate_observations`. The `duplicated()` method provides a `keep` argument to define which duplicates (if any) to mark.
- Sort the `duplicate_observations` data on both the columns `eventDate` and `verbatimLocality` and show the first 9 records.
<details><summary>Hints</summary>
- Check the documentation of the `duplicated` method to find out which value the argument `keep` requires to select all duplicate data.
- `sort_values()` can work with a single column name as well as a list of names.
</details>_____no_output_____
<code>
duplicate_observations = survey_data_processed[survey_data_processed.duplicated(keep=False)]
duplicate_observations.sort_values(["eventDate", "verbatimLocality"]).head(9)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- Exclude the duplicate values (i.e. keep the first occurrence while removing the other ones) from the `observations` data set and save the result as `survey_data_unique`. Use the `drop_duplicates()` method from Pandas.
- How many observations are still left in the data set?
<details><summary>Hints</summary>
- `keep='first'` is the default option for `drop_duplicates`
- The number of rows in a DataFrame is equal to the `len`gth
</details>_____no_output_____
<code>
survey_data_unique = survey_data_processed.drop_duplicates()_____no_output_____len(survey_data_unique)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Use the `dropna()` method to find out:
- For how many observations (rows) do we have all the information available (i.e. no NaN values in any of the columns)?
- For how many observations (rows) do we have the `species` data available?
<details><summary>Hints</summary>
- By default, `dropna` removes all rows for which _any_ of the columns contains a `NaN` value.
- To specify which specific columns to check, use the `subset` argument
</details>_____no_output_____
<code>
len(survey_data_unique.dropna()), len(survey_data_unique.dropna(subset=['species']))_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Filter the `survey_data_unique` data and select only those records that do not have a `species` while having information on the `sex`. Store the result as variable `not_identified`.
<details><summary>Hints</summary>
- To combine logical operators element-wise in Pandas, use the `&` operator.
- Pandas provides both a `isna()` and a `notna()` method to check the existence of `NaN` values.
</details>_____no_output_____
<code>
mask = survey_data_unique['species'].isna() & survey_data_unique['sex'].notna()
not_identified = survey_data_unique[mask]_____no_output_____not_identified.head()_____no_output_____
</code>
__NOTE!__
The `DataFrame` we will use in the further analyses contains species information:_____no_output_____
<code>
survey_data = survey_data_unique.dropna(subset=['species']).copy()
survey_data['name'] = survey_data['genus'] + ' ' + survey_data['species']_____no_output_____
</code>
<div class="alert alert-info">
**INFO**
For biodiversity studies, absence values (knowing that something is not present) are useful as well to normalize the observations, but this is out of scope for these exercises.
</div>_____no_output_____## 3. Select subsets of the data_____no_output_____
<code>
survey_data['taxa'].value_counts()
#survey_data.groupby('taxa').size()_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- Select the observations for which the `taxa` is equal to 'Rabbit', 'Bird' or 'Reptile'. Assign the result to a variable `non_rodent_species`. Use the `isin` method for the selection.
<details><summary>Hints</summary>
- You do not have to combine three different conditions, but use the `isin` operator with a list of names.
</details>_____no_output_____
<code>
non_rodent_species = survey_data[survey_data['taxa'].isin(['Rabbit', 'Bird', 'Reptile'])]
non_rodent_species.head()_____no_output_____len(non_rodent_species)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Select the observations for which the `name` starts with the character 'r' (make sure it does not matter whether a capital letter is used in the name). Call the resulting variable `r_species`.
<details><summary>Hints</summary>
- Remember the `.str.` construction to provide all kinds of string functionality? You can combine multiple of these after each other.
- If the presence of capital letters should not matter, make everything lowercase first before comparing (`.lower()`)
</details>_____no_output_____
<code>
r_species = survey_data[survey_data['name'].str.lower().str.startswith('r')]
r_species.head()_____no_output_____len(r_species)_____no_output_____r_species["name"].value_counts()_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Select the observations that are not Birds. Call the resulting variable <code>non_bird_species</code>.
<details><summary>Hints</summary>
- Logical operators like `==`, `!=`, `>`,... can still be used.
</details>_____no_output_____
<code>
non_bird_species = survey_data[survey_data['taxa'] != 'Bird']
non_bird_species.head()_____no_output_____len(non_bird_species)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Select the __Bird__ (taxa is Bird) observations from 1985-01 till 1989-12 using the `eventDate` column. Call the resulting variable `birds_85_89`.
<details><summary>Hints</summary>
- No hints, you can do this! (with the help of some `<=` and `&`, and don't forget to put brackets around each comparison that you combine)
</details>_____no_output_____
<code>
birds_85_89 = survey_data[(survey_data["eventDate"] >= "1985-01-01")
& (survey_data["eventDate"] <= "1989-12-31 23:59")
& (survey_data['taxa'] == 'Bird')]
birds_85_89.head()_____no_output_____
</code>
Alternative solution:_____no_output_____
<code>
# alternative solution
birds_85_89 = survey_data[(survey_data["eventDate"].dt.year >= 1985)
& (survey_data["eventDate"].dt.year <= 1989)
& (survey_data['taxa'] == 'Bird')]
birds_85_89.head()_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- Drop the observations for which no 'weight' (`wgt` column) information is available.
- On the filtered data, compare the median weight for each of the species (use the `name` column)
- Sort the output from high to low median weight (i.e. descending)
__Note__ You can do this all in a single line statement, but don't have to do it as such!
<details><summary>Hints</summary>
- You will need `dropna`, `groupby`, `median` and `sort_values`.
</details>_____no_output_____
<code>
# Multiple lines
obs_with_weight = survey_data.dropna(subset=["wgt"])
median_weight = obs_with_weight.groupby(['name'])["wgt"].median()
median_weight.sort_values(ascending=False)_____no_output_____# Single line statement
(survey_data
.dropna(subset=["wgt"])
.groupby(['name'])["wgt"]
.median()
.sort_values(ascending=False)
)_____no_output_____
</code>
## 4. Species abundance_____no_output_____<div class="alert alert-success">
**EXERCISE**
Which 8 species (use the `name` column to identify the different species) have been observed most over the entire data set?
<details><summary>Hints</summary>
- Pandas provide a function to combine sorting and showing the first n records, see [here](https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.nlargest.html)...
</details>_____no_output_____
<code>
survey_data.groupby("name").size().nlargest(8)_____no_output_____survey_data['name'].value_counts()[:8]_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- What is the number of different species in each of the `verbatimLocality` plots? Use the `nunique` method. Assign the output to a new variable `n_species_per_plot`.
- Define a Matplotlib `Figure` (`fig`) and `Axes` (`ax`) to prepare a plot. Make a horizontal bar chart using the Pandas `plot` function linked to the just-created Matplotlib `ax`. Each bar represents the `species per plot/verbatimLocality`. Change the y-label to 'Plot number'.
<details><summary>Hints</summary>
- _...in each of the..._ should provide a hint to use `groupby` for this exercise. The `nunique` is the aggregation function for each of the groups.
- `fig, ax = plt.subplots()` prepares a Matplotlib Figure and Axes.
</details>_____no_output_____
<code>
n_species_per_plot = survey_data.groupby(["verbatimLocality"])["name"].nunique()
fig, ax = plt.subplots(figsize=(6, 6))
n_species_per_plot.plot(kind="barh", ax=ax, color="lightblue")
ax.set_ylabel("plot number")
# Alternative option:
# inspired on the pivot table we already had:
# species_per_plot = survey_data.reset_index().pivot_table(
# index="name", columns="verbatimLocality", values="occurrenceID", aggfunc='count')
# n_species_per_plot = species_per_plot.count()_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- What is the number of plots (`verbatimLocality`) each of the species has been observed in? Assign the output to a new variable `n_plots_per_species`. Sort the counts from low to high.
- Make a horizontal bar chart using the Pandas `plot` function to show the number of plots in which each of the species was found (using the `n_plots_per_species` variable).
<details><summary>Hints</summary>
- Use the previous exercise to solve this one.
</details>_____no_output_____
<code>
n_plots_per_species = survey_data.groupby(["name"])["verbatimLocality"].nunique().sort_values()
fig, ax = plt.subplots(figsize=(8, 8))
n_plots_per_species.plot(kind="barh", ax=ax, color='0.4')
ax.set_xlabel("Number of plots");
ax.set_ylabel("");_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- Starting from the `survey_data`, calculate the number of males and females present in each of the plots (`verbatimLocality`). The result should return the counts for each of the combinations of `sex` and `verbatimLocality`. Assign to a new variable `n_plot_sex` and ensure the counts are in a column named "count".
- Use `pivot` to convert the `n_plot_sex` DataFrame to a new DataFrame with the `verbatimLocality` as index and `male`/`female` as column names. Assign to a new variable `pivoted`.
<details><summary>Hints</summary>
- _...for each of the combinations..._ `groupby` can also be used with multiple columns at the same time.
- If a `groupby` operation gives a Series as result, you can give that Series a name with the `.rename(..)` method.
- `reset_index()` is a useful function to convert multiple indices into columns again.
</details>_____no_output_____
<code>
n_plot_sex = survey_data.groupby(["sex", "verbatimLocality"]).size().rename("count").reset_index()
n_plot_sex.head()_____no_output_____pivoted = n_plot_sex.pivot(columns="sex", index="verbatimLocality", values="count")_____no_output_____pivoted.head()_____no_output_____
</code>
To check, we can use the variable `pivoted` to plot the result:_____no_output_____
<code>
pivoted.plot(kind='bar', figsize=(12, 6), rot=0)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Recreate the previous plot with the `catplot` function from the Seaborn library starting from `n_plot_sex`.
<details><summary>Hints</summary>
- Check the `kind` argument of the `catplot` function to figure out how to specify that you want a barplot with given x and y values.
- To link a column to different colors, use the `hue` argument
</details>_____no_output_____
<code>
sns.catplot(data=n_plot_sex, x="verbatimLocality", y="count",
hue="sex", kind="bar", height=3, aspect=3)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Recreate the previous plot with the `catplot` function from the Seaborn library directly starting from `survey_data`.
<details><summary>Hints</summary>
- Check the `kind` argument of the `catplot` function to find out how to use counts to define the bars instead of a `y` value.
- To link a column to different colors, use the `hue` argument
</details>_____no_output_____
<code>
sns.catplot(data=survey_data, x="verbatimLocality",
hue="sex", kind="count", height=3, aspect=3)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
- Make a summary table with the number of records of each of the species in each of the plots (also called `verbatimLocality`). Each of the species `name`s is a row index and each of the `verbatimLocality` plots is a column name.
- Using the Seaborn <a href="http://seaborn.pydata.org/generated/seaborn.heatmap.html">documentation</a> to make a heatmap.
<details><summary>Hints</summary>
- Make sure to pass the correct columns to respectively the `index`, `columns`, `values` and `aggfunc` parameters of the `pivot_table` function. You can use the `datasetName` to count the number of observations for each name/locality combination (when counting rows, the exact column doesn't matter).
</details>_____no_output_____
<code>
species_per_plot = survey_data.pivot_table(index="name",
columns="verbatimLocality",
values="datasetName",
aggfunc='count')
# alternative ways to calculate this
#species_per_plot = survey_data.groupby(['name', 'verbatimLocality']).size().unstack(level=-1)
#pecies_per_plot = pd.crosstab(survey_data['name'], survey_data['verbatimLocality'])_____no_output_____fig, ax = plt.subplots(figsize=(8,8))
sns.heatmap(species_per_plot, ax=ax, cmap='Greens')_____no_output_____
</code>
## 5. Observations over time_____no_output_____<div class="alert alert-success">
**EXERCISE**
Make a plot visualizing the evolution of the number of observations for each of the individual __years__ (i.e. annual counts) using the `resample` method.
<details><summary>Hints</summary>
- You want to `resample` the data using the `eventDate` column to create annual counts. If the index is not a datetime-index, you can use the `on=` keyword to specify which datetime column to use.
- `resample` needs an aggregation function on how to combine the values within a single 'group' (in this case data within a year). In this example, we want to know the `size` of each group, i.e. the number of records within each year.
</details>_____no_output_____
<code>
survey_data.resample('A', on='eventDate').size().plot()_____no_output_____
</code>
To evaluate the intensity or number of occurrences during different time spans, a heatmap is an interesting representation._____no_output_____<div class="alert alert-success">
**EXERCISE**
- Create a table, called `heatmap_prep`, based on the `survey_data` DataFrame, with the individual years as the row index, the months of the year (1 -> 12) as the columns, and the counts for each of these year/month combinations as the values.
- Using the seaborn <a href="http://seaborn.pydata.org/generated/seaborn.heatmap.html">documentation</a>, make a heatmap starting from the `heatmap_prep` variable.
<details><summary>Hints</summary>
- The `.dt` accessor can be used to get the `year`, `month`,... from a `datetime` column
- Use `pivot_table` and provide the years to `index` and the months to `columns`. Do not forget to `count` the number for each combination (`aggfunc`).
- Seaborn has an `heatmap` function which requires a short-form DataFrame, comparable to giving each element in a table a color value.
</details>_____no_output_____
<code>
heatmap_prep = survey_data.pivot_table(index=survey_data['eventDate'].dt.year,
columns=survey_data['eventDate'].dt.month,
values='species', aggfunc='count')
fig, ax = plt.subplots(figsize=(10, 8))
ax = sns.heatmap(heatmap_prep, cmap='Reds')_____no_output_____
</code>
Remark that we started from a `tidy` data format (also called *long* format) and converted to a *short* format, with the years in the row index, the months in the columns, and the counts for each of these year/month combinations as the values._____no_output_____## (OPTIONAL SECTION) 6. Evolution of species during monitoring period_____no_output_____*In this section, all plots can be made with the embedded Pandas plot function, unless specifically asked*_____no_output_____<div class="alert alert-success">
**EXERCISE**
Using the Pandas `plot` function, plot the number of records for `Dipodomys merriami` for each month of the year (January (1) -> December (12)), aggregated over all years.
<details><summary>Hints</summary>
- _...for each month of..._ requires `groupby`.
- `resample` is not useful here, as we do not want to change the time-interval, but look at month of the year (over all years)
</details>_____no_output_____
<code>
merriami = survey_data[survey_data["name"] == "Dipodomys merriami"]_____no_output_____fig, ax = plt.subplots()
merriami.groupby(merriami['eventDate'].dt.month).size().plot(kind="barh", ax=ax)
ax.set_xlabel("number of occurrences");
ax.set_ylabel("Month of the year");_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Plot, for the species 'Dipodomys merriami', 'Dipodomys ordii', 'Reithrodontomys megalotis' and 'Chaetodipus baileyi', the monthly number of records as a function of time during the monitoring period. Plot each of the individual species in a separate subplot and provide them all with the same y-axis scale
<details><summary>Hints</summary>
- `isin` is useful to select from within a list of elements.
- `groupby` AND `resample` need to be combined. We do want to change the time-interval to represent data as a function of time (`resample`) and we want to do this _for each name/species_ (`groupby`). The order matters!
- `unstack` is a Pandas function a bit similar to `pivot`. Check the [unstack documentation](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.unstack.html) as it might be helpful for this exercise.
</details>_____no_output_____
<code>
subsetspecies = survey_data[survey_data["name"].isin(['Dipodomys merriami', 'Dipodomys ordii',
'Reithrodontomys megalotis', 'Chaetodipus baileyi'])]_____no_output_____month_evolution = subsetspecies.groupby("name").resample('M', on='eventDate').size()_____no_output_____species_evolution = month_evolution.unstack(level=0)
axs = species_evolution.plot(subplots=True, figsize=(14, 8), sharey=True)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Recreate the same plot as in the previous exercise using the Seaborn `relplot` function with the `month_evolution` variable.
<details><summary>Hints</summary>
- We want to have the `counts` as a function of `eventDate`, so link these columns to y and x respectively.
- To create subplots in Seaborn, use _facetting_ (splitting the data set into multiple facets) by linking a column name to the `row`/`col` parameter.
- Using `height` and `aspect`, the figure size can be adjusted.
</details>_____no_output_____Uncomment the next cell (calculates `month_evolution`, the intermediate result of the previous exercise):_____no_output_____
<code>
# Given as solution..
subsetspecies = survey_data[survey_data["name"].isin(['Dipodomys merriami', 'Dipodomys ordii',
'Reithrodontomys megalotis', 'Chaetodipus baileyi'])]
month_evolution = subsetspecies.groupby("name").resample('M', on='eventDate').size().rename("counts")
month_evolution = month_evolution.reset_index()
month_evolution.head()_____no_output_____
</code>
Plotting with seaborn:_____no_output_____
<code>
sns.relplot(data=month_evolution, x='eventDate', y="counts",
row="name", kind="line", hue="name", height=2, aspect=5)_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
Plot the annual number of occurrences for each of the 'taxa' as a function of time using Seaborn. Plot each taxa in a separate subplot and do not share the y-axis among the facets.
<details><summary>Hints</summary>
- Combine `resample` and `groupby`!
- Check out the previous exercise for the plot function.
- Pass the `sharey=False` to the `facet_kws` argument as a dictionary.
</details>_____no_output_____
<code>
year_evolution = survey_data.groupby("taxa").resample('A', on='eventDate').size()
year_evolution.name = "counts"
year_evolution = year_evolution.reset_index()_____no_output_____sns.relplot(data=year_evolution, x='eventDate', y="counts",
col="taxa", col_wrap=2, kind="line", height=2, aspect=5,
facet_kws={"sharey": False})_____no_output_____
</code>
<div class="alert alert-success">
**EXERCISE**
The observations were taken by volunteers. You wonder on which day of the week the most observations were done. Calculate for each day of the week (`dayofweek`) the number of observations and make a bar plot.
<details><summary>Hints</summary>
- Did you know the Python standard Library has a module `calendar` which contains names of week days, month names,...?
</details>_____no_output_____
<code>
fig, ax = plt.subplots()
survey_data.groupby(survey_data["eventDate"].dt.dayofweek).size().plot(kind='barh', color='#66b266', ax=ax)
import calendar
xticks = ax.set_yticklabels(calendar.day_name)_____no_output_____
</code>
Nice work!_____no_output_____
| {
"repository": "jonasvdd/DS-python-data-analysis",
"path": "_solved/case2_observations_analysis.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 4,
"size": 705834,
"hexsha": "d0c0d413c8206fe7e2a2d3aa8ed4a49025b70d8a",
"max_line_length": 105696,
"avg_line_length": 176.4143964009,
"alphanum_fraction": 0.8619632945
} |
# Notebook from AfroditiMariaZaki/OxCompBio
Path: tutorials/MD/04_Trajectory_Analysis_Solutions.ipynb
# <span style='color:darkred'> 4 Trajectory Analysis </span>
***_____no_output_____**<span style='color:darkred'> Important Note </span>**
Before proceeding to the rest of the analysis, it is a good time to define a path that points to the location of the MD simulation data, which we will analyze here.
If you successfully ran the MD simulation, the correct path should be:_____no_output_____
<code>
path="OxCompBio-Datafiles/run"_____no_output_____
</code>
If, however, you need or want to use the data from the simulation that has already been performed, uncomment the command below to instead define the path that points to the prerun simulation._____no_output_____
<code>
#path="OxCompBio-Datafiles/prerun/run"_____no_output_____
</code>
## <span style='color:darkred'> 4.1 Visualize the simulation </span>
The simplest and easiest type of analysis you should always do is to look at it with your eyes! Your eyes will tell you if something strange is happening immediately. A numerical analysis may not.
### <span style='color:darkred'> 4.1.1 VMD </span>
*Note: Again, this step is optional. If you don't have VMD, go to section 4.1.2 below to visualize the trajectory with NGLView instead.*
Let us look at the simulations on VMD.
Open VMD by typing in your terminal:
`% vmd`
When it has finished placing all the windows on the screen, click on `File` in the VMD main menu window and select `New Molecule`. The Molecule File Browser window should appear. Click on `Browse...`, then select the `OxCompBio-Datafiles` directory, then the `run` directory, and finally select `em.gro` (i.e. the file you made with the energy-minimized protein system). Click `OK` and then click `Load`. It should load the starting coordinates into the main window. Then click `Browse...` in the Molecule File Browser window again. Select the `OxCompBio-Datafiles` directory, then the `run` directory, and then `md.xtc`. Select `OK` and then hit `Load`. The trajectory should start loading into the main VMD window.
Although things will be moving, you can see that it is quite difficult to visualize the individual components. That is one of the problems with simulating such large and complicated systems. VMD makes it quite easy to look at individual components of a system. For example, let us consider the protein only. On the VMD Main menu, left-click on Graphics and select `Representations`. A new menu will appear (`Graphical Representations`). In the box entitled `Selected Atoms` type protein and hit enter. Only those atoms that form part of the protein are now selected. Various other selections and drawing methods will help to visualize different aspects of the simulation. _____no_output_____<span style='color:Blue'> **Questions** </span>
* How would you say the protein behaves?
* Is it doing anything unexpected? What would you consider unexpected behaviour?_____no_output_____### <span style='color:darkred'> 4.1.2 NGLView </span>
_____no_output_____You have already tested NGLView at the Python tutorial (Notebook `12_ProteinAnalysis`) and at the beginning of this tutorial. This time however, you can visualize the trajectory you generated after carrying out the MD simulation.
You should also be familiar now with the MDAnalysis Python library that we will use to analyze the MD trajectory. We will also use it below, to create a Universe and load it on NGLView._____no_output_____
<code>
# Import MDAnalysis and NGLView
import MDAnalysis
import nglview
# Load the protein structure and the trajectory as a universe named protein
protein=MDAnalysis.Universe(f"{path}/em.gro", f"{path}/md_fit.xtc")
protein_view = nglview.show_mdanalysis(protein)
protein_view.gui_style = 'ngl'
#Color the protein based on its secondary structure
protein_view.update_cartoon(color='sstruc')
protein_view
_____no_output_____
</code>
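If you want to mimic the VMD `Selected Atoms` selection described above, NGLView representations can also be restricted to a selection. The cell below is a minimal sketch (not part of the original tutorial) that reuses the `protein_view` widget created above; `'protein'` is a keyword of the NGL selection language:_____no_output_____
<code>
# Minimal sketch (not part of the original notebook): show only the protein atoms,
# similar to typing "protein" in VMD's Selected Atoms box.
# Assumes the protein_view widget created in the cell above.
protein_view.clear_representations()
protein_view.add_representation('cartoon', selection='protein', color='sstruc')
protein_view_____no_output_____
</code>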
<span style='color:Blue'> **Questions** </span>
* How would you say the protein behaves?
* Is it doing anything unexpected? What would you consider unexpected behaviour?_____no_output_____Now that we are sure the simulation is not doing anything ridiculous, we can start to ask questions about the simulation. The first thing to establish is whether the simulation has equilibrated to some state. So what are some measures of the system
being equilibrated? And what can we use to test the reliability of the simulation?_____no_output_____## <span style='color:darkred'> 4.2 System Equilibration </span> _____no_output_____### <span style='color:darkred'> 4.2.1 Temperature fluctuation </span>
The system temperature as a function of time was calculated in the previous section, with the built-in GROMACS tool `gmx energy`, but we still have not looked at it. It is now time to plot the temperature *vs* time and assess the results.
<span style='color:Blue'> **Questions** </span>
* Does the temperature fluctuate around an equilibrium value?
* Does this value correspond to the temperature that we predefined in the `md.mdp` input file?_____no_output_____Import numpy and pyplot from matplotlib, required to read and plot the data, respectively._____no_output_____
<code>
# We declare matplotlib inline to make sure it plots properly
%matplotlib inline
# We need to import numpy
import numpy as np
# We need pyplot from matplotlib to generate our plots
from matplotlib import pyplot_____no_output_____
</code>
Now, using numpy, we can read the data from the `1hsg_temperature.xvg` file; the first column is the time (in ps) and the second is the system temperature (in K)._____no_output_____
<code>
# Read the file that contains the system temperature for each frame
time=np.loadtxt(f"{path}/1hsg_temperature.xvg", comments=['#','@'])[:, 0]
temperature=np.loadtxt(f"{path}/1hsg_temperature.xvg", comments=['#','@'])[:, 1] _____no_output_____
</code>
You can use numpy again to compute the average temperature and its standard deviation._____no_output_____
<code>
# Calculate and print the mean temperature and the standard deviation
# Keep only two decimal points
mean_temperature=round(np.mean(temperature), 2)
std_temperature=round(np.std(temperature), 2)
print(f"The mean temperature is {mean_temperature} ± {std_temperature} K")The mean temperature is 300.01 ± 1.79 K
</code>
Finally, you can plot the temperature *vs* simulation time._____no_output_____
<code>
# Plot the temperature
pyplot.plot(time, temperature, color='darkred')
pyplot.title("Temperature over time")
pyplot.xlabel("Time [ps]")
pyplot.ylabel("Temperature [K]")
pyplot.show()_____no_output_____
</code>
### <span style='color:darkred'> 4.2.2 Energy of the system </span>
Another set of properties that is quite useful to examine is the various energetic contributions to the energy. The total
energy should be constant, but the various contributions can change, and this can sometimes indicate something
interesting or strange happening in your simulation. Let us look at some energetic properties of the simulation.
We have already extracted the Lennard-Jones energy, the Coulomb energy and the potential energy, again using the GROMACS built-in tool `gmx energy`. The data of these three energetic components are saved in the same file called `1hsg_energies.xvg`; the first column contains the time (in ps) and the columns that follow contain the energies (in kJ/mol), in the same order as they were generated.
We can now read the data from the `1hsg_energies.xvg` file using numpy._____no_output_____
<code>
# Read the file that contains the various energetic components for each frame
time=np.loadtxt(f"{path}/1hsg_energies.xvg", comments=['#','@'])[:, 0]
lennard_jones=np.loadtxt(f"{path}/1hsg_energies.xvg", comments=['#','@'])[:, 1]
coulomb=np.loadtxt(f"{path}/1hsg_energies.xvg", comments=['#','@'])[:, 2]
potential=np.loadtxt(f"{path}/1hsg_energies.xvg", comments=['#','@'])[:, 3]_____no_output_____
</code>
And now that we read the data file, we can plot the energetic components *vs* simulation time in separate plots using matplotlib._____no_output_____
<code>
# Plot the Lennard-Jones energy
pyplot.plot(time, lennard_jones, color='blue')
pyplot.title("Lennard Jones energy over time")
pyplot.xlabel("Time [ps]")
pyplot.ylabel("LJ energy [kJ/mol]")
pyplot.show()
# Plot the electrostatic energy
pyplot.plot(time, coulomb, color='purple')
pyplot.title("Electrostatic energy over time")
pyplot.xlabel("Time [ps]")
pyplot.ylabel("Coulomb energy [kJ/mol]")
pyplot.show()
# Plot the potential energy
pyplot.plot(time, potential, color='green')
pyplot.title("Potential energy over time")
pyplot.xlabel("Time [ps]")
pyplot.ylabel("Potential energy [kJ/mol]")
pyplot.show()_____no_output_____
</code>
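To get a feel for which term dominates, the mean value of each contribution can be compared directly. The cell below is a minimal sketch (not part of the original notebook) that simply reuses the `lennard_jones`, `coulomb` and `potential` arrays loaded above:_____no_output_____
<code>
# Minimal sketch: compare the average value of each energetic contribution
# (assumes the lennard_jones, coulomb and potential arrays read above)
print(f"Mean Lennard-Jones energy: {np.mean(lennard_jones):12.1f} kJ/mol")
print(f"Mean Coulomb energy:       {np.mean(coulomb):12.1f} kJ/mol")
print(f"Mean potential energy:     {np.mean(potential):12.1f} kJ/mol")_____no_output_____
</code>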
<span style='color:Blue'> **Questions** </span>
* Can you plot the Coulomb energy and the potential energy, following the same steps as above?
* Is the total energy stable in this simulation?
* What is the dominant contribution to the potential energy?_____no_output_____## <span style='color:darkred'> 4.3 Analysis of Protein </span>
### <span style='color:darkred'> 4.3.1 Root mean square deviation (RMSD) of 1HSG </span>
The RMSD gives us an idea of how 'stable' our protein is when compared to our starting, static, structure. The lower the RMSD is, the more stable we can say our protein is.
The RMSD as a function of time, $\rho (t)$, can be defined by the following equation:
\begin{equation}
\\
\rho (t) = \sqrt{\frac{1}{N}\sum^N_{i=1}w_i\big(\mathbf{x}_i(t) - \mathbf{x}^{\text{ref}}_i\big)^2}
\end{equation}
Luckily MDAnalysis has its own built-in function to calculate this and we can import it.
_____no_output_____
<code>
# Import built-in MDAnalysis tools for alignment and RMSD.
from MDAnalysis.analysis import align
from MDAnalysis.analysis.rms import RMSD as rmsd
# Define the simulation universe and the reference structure (protein structure at first frame)
protein = MDAnalysis.Universe(f"{path}/md.gro", f"{path}/md_fit.xtc")
protein_ref = MDAnalysis.Universe(f"{path}/em.gro", f"{path}/md_fit.xtc")
protein_ref.trajectory[0]
# Call the MDAnalysis align function to align the MD simulation universe to the reference (first frame) universe
align_strucs = align.AlignTraj(protein, protein_ref, select="backbone", weights="mass", in_memory=True, verbose=True)
R = align_strucs.run()
rmsd_data = R.rmsd
# Plot the RMSD
pyplot.plot(rmsd_data)
pyplot.title("RMSD over time")
pyplot.xlabel("Frame number")
pyplot.ylabel("RMSD (Angstrom)")
pyplot.show()
_____no_output_____
</code>
<span style='color:Blue'> **Questions** </span>
* What does this tell you about the stability of the protein? Is it in a state of equilibrium and if so why and at what time?
* Can you think of a situation where this approach might not be a very good indication of stability?_____no_output_____### <span style='color:darkred'> 4.3.2 Root mean square fluctuation (RMSF) of 1HSG </span>
A similar property that is particularly useful is the root mean square fluctuation (RMSF), which shows how much each residue fluctuates about its average position.
The RMSF for an atom, $\rho_i$, is given by:
\begin{equation}
\rho_i = \sqrt{ \big\langle \big(\mathbf{x}_i - \langle \mathbf{x}_i \rangle \big)^2 \big\rangle }
\end{equation}
where the outer angle brackets denote an average over the trajectory frames._____no_output_____
<code>
from MDAnalysis.analysis.rms import RMSF as rmsf
# Define again the simulation universe, using however the renumbered .gro file that you had generated earlier
protein = MDAnalysis.Universe(f"{path}/em.gro", f"{path}/md_fit.xtc")
# Reset the trajectory to the first frame
protein.trajectory[0]
# We will need to select the alpha Carbons only
calphas = protein.select_atoms("name CA")
# Compute the RMSF of alpha carbons. Omit the first 20 frames,
# assuming that the system needs this amount of time (200 ps) to equilibrate
rmsf_calc = rmsf(calphas, verbose=True).run(start=20)
# Plot the RMSF
pyplot.plot(calphas.resindices+1, rmsf_calc.rmsf, color='darkorange' )
pyplot.title("Per-Residue Alpha Carbon RMSF")
pyplot.xlabel("Residue Number")
pyplot.ylabel("RMSF (Angstrom)")
pyplot.show()_____no_output_____
</code>
<span style='color:Blue'> **Questions** </span>
* Can you identify structural regions from this plot alone, and does that fit in with the structure?
* Residues 43-58 form part of the flexible flap that covers the binding site. How does this region behave in the simulation?_____no_output_____### <span style='color:darkred'> 4.3.3 Hydrogen Bond Formation </span>
We can also use the simulation to monitor the formation of any hydrogen bonds that may be of interest.
In the case of HIV-1 protease, the hydrogen bonds (HB) formed between the ARG8', ASP29 and ARG87 amino acids at the interface of the two subunits help to stabilise the dimer.
We can analyse the trajectory and monitor the stability of these interactions *vs* simulation time._____no_output_____
<code>
# Import the MDAnalysis built-in tool for HB Analysis
from MDAnalysis.analysis.hydrogenbonds.hbond_analysis import HydrogenBondAnalysis as HBA
# Define the protein universe
# Note that when using this tool, it is recommended to include the .tpr file instead of the .gro file,
# because it contains bond information, required for the identification of donors and acceptors.
protein = MDAnalysis.Universe(f"{path}/md.tpr", f"{path}/md.xtc")
# Define the atom selections for the HB calculation.
# In this case, the ARG hydrogens and the ASP oxygens, which act as the HB acceptors are specifically defined.
hbonds = HBA(universe=protein, hydrogens_sel='resname ARG and name HH21 HH22', acceptors_sel='resname ASP and name OD1 OD2')
# Perform the HB calculation
hbonds.run()
# Plot the total number of ASP-ARG HBs vs time
hbonds_time=hbonds.times
hbonds_data=hbonds.count_by_time()
pyplot.plot(hbonds_time, hbonds_data, color='darkorange')
pyplot.title("ASP-ARG Hydrogen Bonds")
pyplot.xlabel("Time [ps]")
pyplot.ylabel("# Hydrogen Bonds")
pyplot.show()
# Compute and print the average number of HBs and the standard deviation
aver_hbonds=round(np.mean(hbonds_data), 2)
std_hbonds=round(np.std(hbonds_data), 2)
print(f"The average number of ASP-ARG HBs is {aver_hbonds} ± {std_hbonds}")/home/mjkikaz2/anaconda3/envs/oxpy/lib/python3.6/site-packages/MDAnalysis/core/topologyattrs.py:2011: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray
np.array(sorted(unique_bonds)), 4)
</code>
<span style='color:Blue'> **Questions** </span>
* How much variation is there in the number of hydrogen bonds?
* Do any break and not reform?
* Using VMD, can you observe the HB formation and breakage throughout the simulation?
***_____no_output_____This concludes the analysis section, but the aim was only to give you an idea of the numerous information that we can gain when analysing an MD trajectory. Feel free to ask and attempt to answer your own questions, utilising the tools that you were introduced to during the tutorial._____no_output_____## <span style='color:darkred'> 4.4 Further Reading </span>
The texts recommended here are the same as those mentioned in the lecture:
* "Molecular Modelling. Principles and Applications". Andrew Leach. Publisher: Prentice Hall. ISBN: 0582382106. This book has rapidly become the defacto introductory text for all aspects of simulation.
* "Computer simulation of liquids". Allen, Michael P., and Dominic J. Tildesley. Oxford university press, 2017.
* "Molecular Dynamics Simulation: Elementary Methods". J.M. Haile. Publisher: Wiley. ISBN: 047118439X. This text provides a more focus but slightly more old-fashioned view of simulation. It has some nice simple examples of how to code (in fortran) some of the algorithms though.
_____no_output_____
| {
"repository": "AfroditiMariaZaki/OxCompBio",
"path": "tutorials/MD/04_Trajectory_Analysis_Solutions.ipynb",
"matched_keywords": [
"molecular dynamics"
],
"stars": null,
"size": 226085,
"hexsha": "d0c237d677b7ec885747abf21a8da4ab1f20f7aa",
"max_line_length": 35044,
"avg_line_length": 308.8592896175,
"alphanum_fraction": 0.928894
} |
# Notebook from MarceloClaro/whitebox-python
Path: examples/whitebox.ipynb
# A tutorial for the whitebox Python package
This notebook demonstrates the usage of the **whitebox** Python package for geospatial analysis, which is built on a stand-alone executable command-line program called [WhiteboxTools](https://github.com/jblindsay/whitebox-tools).
* Authors: Dr. John Lindsay (https://jblindsay.github.io/ghrg/index.html)
* Contributors: Dr. Qiusheng Wu (https://wetlands.io)
* GitHub repo: https://github.com/giswqs/whitebox-python
* WhiteboxTools: https://github.com/jblindsay/whitebox-tools
* User Manual: https://jblindsay.github.io/wbt_book
* PyPI: https://pypi.org/project/whitebox/
* Documentation: https://whitebox.readthedocs.io
* Binder: https://gishub.org/whitebox-cloud
* Free software: [MIT license](https://opensource.org/licenses/MIT)
This tutorial can be accessed in three ways:
* HTML version: https://gishub.org/whitebox-html
* Viewable Notebook: https://gishub.org/whitebox-notebook
* Interactive Notebook: https://gishub.org/whitebox-cloud
**Launch this tutorial as an interactive Jupyter Notebook on the cloud - [MyBinder.org](https://gishub.org/whitebox-cloud).**
_____no_output_____## Table of Content
* [Installation](#Installation)
* [About whitebox](#About-whitebox)
* [Getting data](#Getting-data)
* [Using whitebox](#Using-whitebox)
* [Displaying results](#Displaying-results)
* [whitebox GUI](#whitebox-GUI)
* [Citing whitebox](#Citing-whitebox)
* [Credits](#Credits)
* [Contact](#Contact)
_____no_output_____## Installation
**whitebox** supports a variety of platforms, including Microsoft Windows, macOS, and Linux operating systems. Note that you will need to have **Python 3.x** installed. Python 2.x is not supported. The **whitebox** Python package can be installed using the following command:
`pip install whitebox`
If you have installed **whitebox** Python package before and want to upgrade to the latest version, you can use the following command:
`pip install whitebox -U`
If you encounter any installation issues, please check [Troubleshooting](https://github.com/giswqs/whitebox#troubleshooting) on the **whitebox** GitHub page and [Report Bugs](https://github.com/giswqs/whitebox#reporting-bugs)._____no_output_____## About whitebox
**import whitebox and call WhiteboxTools()**_____no_output_____
<code>
import whitebox
wbt = whitebox.WhiteboxTools()_____no_output_____
</code>
**Prints the whitebox-tools help...a listing of available commands**_____no_output_____
<code>
print(wbt.help())WhiteboxTools Help
The following commands are recognized:
--cd, --wd Changes the working directory; used in conjunction with --run flag.
-h, --help Prints help information.
-l, --license Prints the whitebox-tools license.
--listtools Lists all available tools. Keywords may also be used, --listtools slope.
-r, --run Runs a tool; used in conjuction with --wd flag; -r="LidarInfo".
--toolbox Prints the toolbox associated with a tool; --toolbox=Slope.
--toolhelp Prints the help associated with a tool; --toolhelp="LidarInfo".
--toolparameters Prints the parameters (in json form) for a specific tool; --toolparameters="LidarInfo".
-v Verbose mode. Without this flag, tool outputs will not be printed.
--viewcode Opens the source code of a tool in a web browser; --viewcode="LidarInfo".
--version Prints the version information.
Example Usage:
>> ./whitebox-tools -r=lidar_info --cd="/path/to/data/" -i=input.las --vlr --geokeys
</code>
**Prints the whitebox-tools license**_____no_output_____
<code>
print(wbt.license())WhiteboxTools License
Copyright 2017-2019 John Lindsay
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
</code>
**Prints the whitebox-tools version**_____no_output_____
<code>
print("Version information: {}".format(wbt.version()))Version information: WhiteboxTools v1.0.0 by Dr. John B. Lindsay (c) 2017-2019
WhiteboxTools is an advanced geospatial data analysis platform developed at
the University of Guelph's Geomorphometry and Hydrogeomatics Research
Group (GHRG). See https://jblindsay.github.io/ghrg/WhiteboxTools/index.html
for more details.
</code>
**Print the help for a specific tool.**_____no_output_____
<code>
print(wbt.tool_help("ElevPercentile"))ElevPercentile
Description:
Calculates the elevation percentile raster from a DEM.
Toolbox: Geomorphometric Analysis
Parameters:
Flag Description
----------------- -----------
-i, --input, --dem Input raster DEM file.
-o, --output Output raster file.
--filterx Size of the filter kernel in the x-direction.
--filtery Size of the filter kernel in the y-direction.
--sig_digits Number of significant digits.
Example usage:
>>./whitebox_tools -r=ElevPercentile -v --wd="/path/to/data/" --dem=DEM.tif -o=output.tif --filter=25
</code>
**Tool names in the whitebox Python package can be called either using the snake_case or CamelCase convention (e.g. lidar_info or LidarInfo). The example below uses snake_case.** _____no_output_____
<code>
import os, pkg_resources
# identify the sample data directory of the package
data_dir = os.path.dirname(pkg_resources.resource_filename("whitebox", 'testdata/'))
# set whitebox working directory
wbt.set_working_dir(data_dir)
wbt.verbose = False
# call whiteboxtools
wbt.feature_preserving_smoothing("DEM.tif", "smoothed.tif", filter=9)
wbt.breach_depressions("smoothed.tif", "breached.tif")
wbt.d_inf_flow_accumulation("breached.tif", "flow_accum.tif")_____no_output_____
</code>
**You can search tools using keywords. For example, the script below searches and lists tools with 'lidar' or 'LAS' in tool name or description.**_____no_output_____
<code>
lidar_tools = wbt.list_tools(['lidar', 'LAS'])
for index, tool in enumerate(lidar_tools):
print("{} {}: {} ...".format(str(index+1).zfill(3), tool, lidar_tools[tool][:45]))001 classify_overlap_points: Classifies or filters LAS points in regions o ...
002 clip_lidar_to_polygon: Clips a LiDAR point cloud to a vector polygon ...
003 erase_polygon_from_lidar: Erases (cuts out) a vector polygon or polygon ...
004 filter_lidar_classes: Removes points in a LAS file with certain spe ...
005 filter_lidar_scan_angles: Removes points in a LAS file with scan angles ...
006 find_flightline_edge_points: Identifies points along a flightline's edge i ...
007 find_patch_or_class_edge_cells: Finds all cells located on the edge of patch ...
008 flightline_overlap: Reads a LiDAR (LAS) point file and outputs a ...
009 las_to_ascii: Converts one or more LAS files into ASCII tex ...
010 las_to_multipoint_shapefile: Converts one or more LAS files into Multipoin ...
011 las_to_shapefile: Converts one or more LAS files into a vector ...
012 lidar_block_maximum: Creates a block-maximum raster from an input ...
013 lidar_block_minimum: Creates a block-minimum raster from an input ...
014 lidar_classify_subset: Classifies the values in one LiDAR point clou ...
015 lidar_colourize: Adds the red-green-blue colour fields of a Li ...
016 lidar_construct_vector_tin: Creates a vector triangular irregular network ...
017 lidar_elevation_slice: Outputs all of the points within a LiDAR (LAS ...
018 lidar_ground_point_filter: Identifies ground points within LiDAR dataset ...
019 lidar_hex_binning: Hex-bins a set of LiDAR points. ...
020 lidar_hillshade: Calculates a hillshade value for points withi ...
021 lidar_histogram: Creates a histogram of LiDAR data. ...
022 lidar_idw_interpolation: Interpolates LAS files using an inverse-dista ...
023 lidar_info: Prints information about a LiDAR (LAS) datase ...
024 lidar_join: Joins multiple LiDAR (LAS) files into a singl ...
025 lidar_kappa_index: Performs a kappa index of agreement (KIA) ana ...
026 lidar_nearest_neighbour_gridding: Grids LAS files using nearest-neighbour schem ...
027 lidar_point_density: Calculates the spatial pattern of point densi ...
028 lidar_point_stats: Creates several rasters summarizing the distr ...
029 lidar_ransac_planes: Removes outliers (high and low points) in a L ...
030 lidar_remove_duplicates: Removes duplicate points from a LiDAR data se ...
031 lidar_remove_outliers: Removes outliers (high and low points) in a L ...
032 lidar_segmentation: Segments a LiDAR point cloud based on normal ...
033 lidar_segmentation_based_filter: Identifies ground points within LiDAR point c ...
034 lidar_tin_gridding: Creates a raster grid based on a Delaunay tri ...
035 lidar_thin: Thins a LiDAR point cloud, reducing point den ...
036 lidar_thin_high_density: Thins points from high density areas within a ...
037 lidar_tile: Tiles a LiDAR LAS file into multiple LAS file ...
038 lidar_tile_footprint: Creates a vector polygon of the convex hull o ...
039 lidar_tophat_transform: Performs a white top-hat transform on a Lidar ...
040 normal_vectors: Calculates normal vectors for points within a ...
041 pennock_landform_class: Classifies hillslope zones based on slope, pr ...
042 raster_area: Calculates the area of polygons or classes wi ...
043 raster_cell_assignment: Assign row or column number to cells. ...
044 reclass: Reclassifies the values in a raster image. ...
045 reclass_equal_interval: Reclassifies the values in a raster image bas ...
046 reclass_from_file: Reclassifies the values in a raster image usi ...
047 select_tiles_by_polygon: Copies LiDAR tiles overlapping with a polygon ...
048 shape_complexity_index_raster: Calculates the complexity of raster polygons ...
049 stream_link_class: Identifies the exterior/interior links and no ...
</code>
**List all available tools in whitebox-tools**. Currently, **whitebox** contains 412 tools, as listed below. More tools will be added as they become available._____no_output_____
<code>
all_tools = wbt.list_tools()
for index, tool in enumerate(all_tools):
print("{} {}: {} ...".format(str(index+1).zfill(3), tool, all_tools[tool][:45]))001 absolute_value: Calculates the absolute value of every cell i ...
002 adaptive_filter: Performs an adaptive filter on an image. ...
003 add: Performs an addition operation on two rasters ...
004 add_point_coordinates_to_table: Modifies the attribute table of a point vecto ...
005 aggregate_raster: Aggregates a raster to a lower resolution. ...
006 and: Performs a logical AND operator on two Boolea ...
007 anova: Performs an analysis of variance (ANOVA) test ...
008 arc_cos: Returns the inverse cosine (arccos) of each v ...
009 arc_sin: Returns the inverse sine (arcsin) of each val ...
010 arc_tan: Returns the inverse tangent (arctan) of each ...
011 aspect: Calculates an aspect raster from an input DEM ...
012 atan2: Returns the 2-argument inverse tangent (atan2 ...
013 attribute_correlation: Performs a correlation analysis on attribute ...
014 attribute_histogram: Creates a histogram for the field values of a ...
015 attribute_scattergram: Creates a scattergram for two field values of ...
016 average_flowpath_slope: Measures the average slope gradient from each ...
017 average_normal_vector_angular_deviation: Calculates the circular variance of aspect at ...
018 average_overlay: Calculates the average for each grid cell fro ...
019 average_upslope_flowpath_length: Measures the average length of all upslope fl ...
020 balance_contrast_enhancement: Performs a balance contrast enhancement on a ...
021 basins: Identifies drainage basins that drain to the ...
022 bilateral_filter: A bilateral filter is an edge-preserving smoo ...
023 block_maximum_gridding: Creates a raster grid based on a set of vecto ...
024 block_minimum_gridding: Creates a raster grid based on a set of vecto ...
025 boundary_shape_complexity: Calculates the complexity of the boundaries o ...
026 breach_depressions: Breaches all of the depressions in a DEM usin ...
027 breach_single_cell_pits: Removes single-cell pits from an input DEM by ...
028 buffer_raster: Maps a distance-based buffer around each non- ...
029 ceil: Returns the smallest (closest to negative inf ...
030 centroid: Calculates the centroid, or average location, ...
031 centroid_vector: Identifes the centroid point of a vector poly ...
032 change_vector_analysis: Performs a change vector analysis on a two-da ...
033 circular_variance_of_aspect: Calculates the circular variance of aspect at ...
034 classify_overlap_points: Classifies or filters LAS points in regions o ...
035 clean_vector: Removes null features and lines/polygons with ...
036 clip: Extract all the features, or parts of feature ...
037 clip_lidar_to_polygon: Clips a LiDAR point cloud to a vector polygon ...
038 clip_raster_to_polygon: Clips a raster to a vector polygon. ...
039 closing: A closing is a mathematical morphology operat ...
040 clump: Groups cells that form discrete areas, assign ...
041 compactness_ratio: Calculates the compactness ratio (A/P), a mea ...
042 conservative_smoothing_filter: Performs a conservative-smoothing filter on a ...
043 construct_vector_tin: Creates a vector triangular irregular network ...
044 convert_nodata_to_zero: Converts nodata values in a raster to zero. ...
045 convert_raster_format: Converts raster data from one format to anoth ...
046 corner_detection: Identifies corner patterns in boolean images ...
047 correct_vignetting: Corrects the darkening of images towards corn ...
048 cos: Returns the cosine (cos) of each values in a ...
049 cosh: Returns the hyperbolic cosine (cosh) of each ...
050 cost_allocation: Identifies the source cell to which each grid ...
051 cost_distance: Performs cost-distance accumulation on a cost ...
052 cost_pathway: Performs cost-distance pathway analysis using ...
053 count_if: Counts the number of occurrences of a specifi ...
054 create_colour_composite: Creates a colour-composite image from three b ...
055 create_hexagonal_vector_grid: Creates a hexagonal vector grid. ...
056 create_plane: Creates a raster image based on the equation ...
057 create_rectangular_vector_grid: Creates a rectangular vector grid. ...
058 crispness_index: Calculates the Crispness Index, which is used ...
059 cross_tabulation: Performs a cross-tabulation on two categorica ...
060 csv_points_to_vector: Converts a CSV text file to vector points. ...
061 cumulative_distribution: Converts a raster image to its cumulative dis ...
062 d8_flow_accumulation: Calculates a D8 flow accumulation raster from ...
063 d8_mass_flux: Performs a D8 mass flux calculation. ...
064 d8_pointer: Calculates a D8 flow pointer raster from an i ...
065 d_inf_flow_accumulation: Calculates a D-infinity flow accumulation ras ...
066 d_inf_mass_flux: Performs a D-infinity mass flux calculation. ...
067 d_inf_pointer: Calculates a D-infinity flow pointer (flow di ...
068 decrement: Decreases the values of each grid cell in an ...
069 depth_in_sink: Measures the depth of sinks (depressions) in ...
070 dev_from_mean_elev: Calculates deviation from mean elevation. ...
071 diff_from_mean_elev: Calculates difference from mean elevation (eq ...
072 diff_of_gaussian_filter: Performs a Difference of Gaussian (DoG) filte ...
073 difference: Outputs the features that occur in one of the ...
074 direct_decorrelation_stretch: Performs a direct decorrelation stretch enhan ...
075 directional_relief: Calculates relief for cells in an input DEM f ...
076 dissolve: Removes the interior, or shared, boundaries w ...
077 distance_to_outlet: Calculates the distance of stream grid cells ...
078 diversity_filter: Assigns each cell in the output grid the numb ...
079 divide: Performs a division operation on two rasters ...
080 downslope_distance_to_stream: Measures distance to the nearest downslope st ...
081 downslope_flowpath_length: Calculates the downslope flowpath length from ...
082 downslope_index: Calculates the Hjerdt et al. (2004) downslope ...
083 edge_density: Calculates the density of edges, or breaks-in ...
084 edge_preserving_mean_filter: Performs a simple edge-preserving mean filter ...
085 edge_proportion: Calculate the proportion of cells in a raster ...
086 elev_above_pit: Calculate the elevation of each grid cell abo ...
087 elev_percentile: Calculates the elevation percentile raster fr ...
088 elev_relative_to_min_max: Calculates the elevation of a location relati ...
089 elev_relative_to_watershed_min_max: Calculates the elevation of a location relati ...
090 elevation_above_stream: Calculates the elevation of cells above the n ...
091 elevation_above_stream_euclidean: Calculates the elevation of cells above the n ...
092 eliminate_coincident_points: Removes any coincident, or nearly coincident, ...
093 elongation_ratio: Calculates the elongation ratio for vector po ...
094 emboss_filter: Performs an emboss filter on an image, simila ...
095 equal_to: Performs a equal-to comparison operation on t ...
096 erase: Removes all the features, or parts of feature ...
097 erase_polygon_from_lidar: Erases (cuts out) a vector polygon or polygon ...
098 erase_polygon_from_raster: Erases (cuts out) a vector polygon from a ras ...
099 euclidean_allocation: Assigns grid cells in the output raster the v ...
100 euclidean_distance: Calculates the Shih and Wu (2004) Euclidean d ...
101 exp: Returns the exponential (base e) of values in ...
102 exp2: Returns the exponential (base 2) of values in ...
103 export_table_to_csv: Exports an attribute table to a CSV text file ...
104 extend_vector_lines: Extends vector lines by a specified distance. ...
105 extract_nodes: Converts vector lines or polygons into vertex ...
106 extract_raster_values_at_points: Extracts the values of raster(s) at vector po ...
107 extract_streams: Extracts stream grid cells from a flow accumu ...
108 extract_valleys: Identifies potential valley bottom grid cells ...
109 fd8_flow_accumulation: Calculates an FD8 flow accumulation raster fr ...
110 fd8_pointer: Calculates an FD8 flow pointer raster from an ...
111 farthest_channel_head: Calculates the distance to the furthest upstr ...
112 fast_almost_gaussian_filter: Performs a fast approximate Gaussian filter o ...
113 feature_preserving_smoothing: Reduces short-scale variation in an input DEM ...
114 fetch_analysis: Performs an analysis of fetch or upwind dista ...
115 fill_burn: Burns streams into a DEM using the FillBurn ( ...
116 fill_depressions: Fills all of the depressions in a DEM. Depres ...
117 fill_missing_data: Fills NoData holes in a DEM. ...
118 fill_single_cell_pits: Raises pit cells to the elevation of their lo ...
119 filter_lidar_classes: Removes points in a LAS file with certain spe ...
120 filter_lidar_scan_angles: Removes points in a LAS file with scan angles ...
121 find_flightline_edge_points: Identifies points along a flightline's edge i ...
122 find_lowest_or_highest_points: Locates the lowest and/or highest valued cell ...
123 find_main_stem: Finds the main stem, based on stream lengths, ...
124 find_no_flow_cells: Finds grid cells with no downslope neighbours ...
125 find_parallel_flow: Finds areas of parallel flow in D8 flow direc ...
126 find_patch_or_class_edge_cells: Finds all cells located on the edge of patch ...
127 find_ridges: Identifies potential ridge and peak grid cell ...
128 flatten_lakes: Flattens lake polygons in a raster DEM. ...
129 flightline_overlap: Reads a LiDAR (LAS) point file and outputs a ...
130 flip_image: Reflects an image in the vertical or horizont ...
131 flood_order: Assigns each DEM grid cell its order in the s ...
132 floor: Returns the largest (closest to positive infi ...
133 flow_accumulation_full_workflow: Resolves all of the depressions in a DEM, out ...
134 flow_length_diff: Calculates the local maximum absolute differe ...
135 gamma_correction: Performs a gamma correction on an input image ...
136 gaussian_contrast_stretch: Performs a Gaussian contrast stretch on input ...
137 gaussian_filter: Performs a Gaussian filter on an image. ...
138 greater_than: Performs a greater-than comparison operation ...
139 hack_stream_order: Assigns the Hack stream order to each tributa ...
140 high_pass_filter: Performs a high-pass filter on an input image ...
141 high_pass_median_filter: Performs a high pass median filter on an inpu ...
142 highest_position: Identifies the stack position of the maximum ...
143 hillshade: Calculates a hillshade raster from an input D ...
144 hillslopes: Identifies the individual hillslopes draining ...
145 histogram_equalization: Performs a histogram equalization contrast en ...
146 histogram_matching: Alters the statistical distribution of a rast ...
147 histogram_matching_two_images: This tool alters the cumulative distribution ...
148 hole_proportion: Calculates the proportion of the total area o ...
149 horizon_angle: Calculates horizon angle (maximum upwind slop ...
150 horton_stream_order: Assigns the Horton stream order to each tribu ...
151 hypsometric_analysis: Calculates a hypsometric curve for one or mor ...
152 idw_interpolation: Interpolates vector points into a raster surf ...
153 ihs_to_rgb: Converts intensity, hue, and saturation (IHS) ...
154 image_autocorrelation: Performs Moran's I analysis on two or more in ...
155 image_correlation: Performs image correlation on two or more inp ...
156 image_regression: Performs image regression analysis on two inp ...
157 image_stack_profile: Plots an image stack profile (i.e. signature) ...
158 impoundment_size_index: Calculates the impoundment size resulting fro ...
159 in_place_add: Performs an in-place addition operation (inpu ...
160 in_place_divide: Performs an in-place division operation (inpu ...
161 in_place_multiply: Performs an in-place multiplication operation ...
162 in_place_subtract: Performs an in-place subtraction operation (i ...
163 increment: Increases the values of each grid cell in an ...
164 integer_division: Performs an integer division operation on two ...
165 integral_image: Transforms an input image (summed area table) ...
166 intersect: Identifies the parts of features in common be ...
167 is_no_data: Identifies NoData valued pixels in an image. ...
168 isobasins: Divides a landscape into nearly equal sized d ...
169 jenson_snap_pour_points: Moves outlet points used to specify points of ...
170 join_tables: Merge a vector's attribute table with another ...
171 k_means_clustering: Performs a k-means clustering operation on a ...
172 k_nearest_mean_filter: A k-nearest mean filter is a type of edge-pre ...
173 ks_test_for_normality: Evaluates whether the values in a raster are ...
174 kappa_index: Performs a kappa index of agreement (KIA) ana ...
175 laplacian_filter: Performs a Laplacian filter on an image. ...
176 laplacian_of_gaussian_filter: Performs a Laplacian-of-Gaussian (LoG) filter ...
177 las_to_ascii: Converts one or more LAS files into ASCII tex ...
178 las_to_multipoint_shapefile: Converts one or more LAS files into Multipoin ...
179 las_to_shapefile: Converts one or more LAS files into a vector ...
180 layer_footprint: Creates a vector polygon footprint of the are ...
181 lee_filter: Performs a Lee (Sigma) smoothing filter on an ...
182 length_of_upstream_channels: Calculates the total length of channels upstr ...
183 less_than: Performs a less-than comparison operation on ...
184 lidar_block_maximum: Creates a block-maximum raster from an input ...
185 lidar_block_minimum: Creates a block-minimum raster from an input ...
186 lidar_classify_subset: Classifies the values in one LiDAR point clou ...
187 lidar_colourize: Adds the red-green-blue colour fields of a Li ...
188 lidar_construct_vector_tin: Creates a vector triangular irregular network ...
189 lidar_elevation_slice: Outputs all of the points within a LiDAR (LAS ...
190 lidar_ground_point_filter: Identifies ground points within LiDAR dataset ...
191 lidar_hex_binning: Hex-bins a set of LiDAR points. ...
192 lidar_hillshade: Calculates a hillshade value for points withi ...
193 lidar_histogram: Creates a histogram of LiDAR data. ...
194 lidar_idw_interpolation: Interpolates LAS files using an inverse-dista ...
195 lidar_info: Prints information about a LiDAR (LAS) datase ...
196 lidar_join: Joins multiple LiDAR (LAS) files into a singl ...
197 lidar_kappa_index: Performs a kappa index of agreement (KIA) ana ...
198 lidar_nearest_neighbour_gridding: Grids LAS files using nearest-neighbour schem ...
199 lidar_point_density: Calculates the spatial pattern of point densi ...
200 lidar_point_stats: Creates several rasters summarizing the distr ...
201 lidar_ransac_planes: Removes outliers (high and low points) in a L ...
202 lidar_remove_duplicates: Removes duplicate points from a LiDAR data se ...
203 lidar_remove_outliers: Removes outliers (high and low points) in a L ...
204 lidar_segmentation: Segments a LiDAR point cloud based on normal ...
205 lidar_segmentation_based_filter: Identifies ground points within LiDAR point c ...
206 lidar_tin_gridding: Creates a raster grid based on a Delaunay tri ...
207 lidar_thin: Thins a LiDAR point cloud, reducing point den ...
208 lidar_thin_high_density: Thins points from high density areas within a ...
209 lidar_tile: Tiles a LiDAR LAS file into multiple LAS file ...
210 lidar_tile_footprint: Creates a vector polygon of the convex hull o ...
211 lidar_tophat_transform: Performs a white top-hat transform on a Lidar ...
212 line_detection_filter: Performs a line-detection filter on an image. ...
213 line_intersections: Identifies points where the features of two v ...
214 line_thinning: Performs line thinning a on Boolean raster im ...
215 linearity_index: Calculates the linearity index for vector pol ...
216 lines_to_polygons: Converts vector polylines to polygons. ...
217 list_unique_values: Lists the unique values contained in a field ...
218 ln: Returns the natural logarithm of values in a ...
219 log10: Returns the base-10 logarithm of values in a ...
220 log2: Returns the base-2 logarithm of values in a r ...
221 long_profile: Plots the stream longitudinal profiles for on ...
222 long_profile_from_points: Plots the longitudinal profiles from flow-pat ...
223 longest_flowpath: Delineates the longest flowpaths for a group ...
224 lowest_position: Identifies the stack position of the minimum ...
225 majority_filter: Assigns each cell in the output grid the most ...
226 max: Performs a MAX operation on two rasters or a ...
227 max_absolute_overlay: Evaluates the maximum absolute value for each ...
228 max_anisotropy_dev: Calculates the maximum anisotropy (directiona ...
229 max_anisotropy_dev_signature: Calculates the anisotropy in deviation from m ...
230 max_branch_length: Lindsay and Seibert's (2013) branch length in ...
231 max_difference_from_mean: Calculates the maximum difference from mean e ...
232 max_downslope_elev_change: Calculates the maximum downslope change in el ...
233 max_elev_dev_signature: Calculates the maximum elevation deviation ov ...
234 max_elevation_deviation: Calculates the maximum elevation deviation ov ...
235 max_overlay: Evaluates the maximum value for each grid cel ...
236 max_upslope_flowpath_length: Measures the maximum length of all upslope fl ...
237 maximum_filter: Assigns each cell in the output grid the maxi ...
238 mean_filter: Performs a mean filter (low-pass filter) on a ...
239 median_filter: Performs a median filter on an input image. ...
240 medoid: Calculates the medoid for a series of vector ...
241 merge_line_segments: Merges vector line segments into larger featu ...
242 merge_table_with_csv: Merge a vector's attribute table with a table ...
243 merge_vectors: Combines two or more input vectors of the sam ...
244 min: Performs a MIN operation on two rasters or a ...
245 min_absolute_overlay: Evaluates the minimum absolute value for each ...
246 min_downslope_elev_change: Calculates the minimum downslope change in el ...
247 min_max_contrast_stretch: Performs a min-max contrast stretch on an inp ...
248 min_overlay: Evaluates the minimum value for each grid cel ...
249 minimum_bounding_box: Creates a vector minimum bounding rectangle a ...
250 minimum_bounding_circle: Delineates the minimum bounding circle (i.e. ...
251 minimum_bounding_envelope: Creates a vector axis-aligned minimum boundin ...
252 minimum_convex_hull: Creates a vector convex polygon around vector ...
253 minimum_filter: Assigns each cell in the output grid the mini ...
254 modified_k_means_clustering: Performs a modified k-means clustering operat ...
255 modify_no_data_value: Converts nodata values in a raster to zero. ...
256 modulo: Performs a modulo operation on two rasters or ...
257 mosaic: Mosaics two or more images together. ...
258 mosaic_with_feathering: Mosaics two images together using a featherin ...
259 multi_part_to_single_part: Converts a vector file containing multi-part ...
260 multiply: Performs a multiplication operation on two ra ...
261 multiscale_roughness: Calculates surface roughness over a range of ...
262 multiscale_roughness_signature: Calculates the surface roughness for points o ...
263 multiscale_std_dev_normals: Calculates surface roughness over a range of ...
264 multiscale_std_dev_normals_signature: Calculates the surface roughness for points o ...
265 multiscale_topographic_position_image: Creates a multiscale topographic position ima ...
266 narrowness_index: Calculates the narrowness of raster polygons. ...
267 nearest_neighbour_gridding: Creates a raster grid based on a set of vecto ...
268 negate: Changes the sign of values in a raster or the ...
269 new_raster_from_base: Creates a new raster using a base image. ...
270 normal_vectors: Calculates normal vectors for points within a ...
271 normalized_difference_index: Calculate a normalized-difference index (NDI) ...
272 not: Performs a logical NOT operator on two Boolea ...
273 not_equal_to: Performs a not-equal-to comparison operation ...
274 num_downslope_neighbours: Calculates the number of downslope neighbours ...
275 num_inflowing_neighbours: Computes the number of inflowing neighbours t ...
276 num_upslope_neighbours: Calculates the number of upslope neighbours t ...
277 olympic_filter: Performs an olympic smoothing filter on an im ...
278 opening: An opening is a mathematical morphology opera ...
279 or: Performs a logical OR operator on two Boolean ...
280 panchromatic_sharpening: Increases the spatial resolution of image dat ...
281 patch_orientation: Calculates the orientation of vector polygons ...
282 pennock_landform_class: Classifies hillslope zones based on slope, pr ...
283 percent_elev_range: Calculates percent of elevation range from a ...
284 percent_equal_to: Calculates the percentage of a raster stack t ...
285 percent_greater_than: Calculates the percentage of a raster stack t ...
286 percent_less_than: Calculates the percentage of a raster stack t ...
287 percentage_contrast_stretch: Performs a percentage linear contrast stretch ...
288 percentile_filter: Performs a percentile filter on an input imag ...
289 perimeter_area_ratio: Calculates the perimeter-area ratio of vector ...
290 pick_from_list: Outputs the value from a raster stack specifi ...
291 plan_curvature: Calculates a plan (contour) curvature raster ...
292 polygon_area: Calculates the area of vector polygons. ...
293 polygon_long_axis: This tool can be used to map the long axis of ...
294 polygon_perimeter: Calculates the perimeter of vector polygons. ...
295 polygon_short_axis: This tool can be used to map the short axis o ...
296 polygonize: Creates a polygon layer from two or more inte ...
297 polygons_to_lines: Converts vector polygons to polylines. ...
298 power: Raises the values in grid cells of one raster ...
299 prewitt_filter: Performs a Prewitt edge-detection filter on a ...
300 principal_component_analysis: Performs a principal component analysis (PCA) ...
301 print_geo_tiff_tags: Prints the tags within a GeoTIFF. ...
302 profile: Plots profiles from digital surface models. ...
303 profile_curvature: Calculates a profile curvature raster from an ...
304 quantiles: Transforms raster values into quantiles. ...
305 radius_of_gyration: Calculates the distance of cells from their p ...
306 raise_walls: Raises walls in a DEM along a line or around ...
307 random_field: Creates an image containing random values. ...
308 random_sample: Creates an image containing randomly located ...
309 range_filter: Assigns each cell in the output grid the rang ...
310 raster_area: Calculates the area of polygons or classes wi ...
311 raster_cell_assignment: Assign row or column number to cells. ...
312 raster_histogram: Creates a histogram from raster values. ...
313 raster_streams_to_vector: Converts a raster stream file into a vector f ...
314 raster_summary_stats: Measures a rasters min, max, average, standar ...
315 raster_to_vector_lines: Converts a raster lines features into a vecto ...
316 raster_to_vector_points: Converts a raster dataset to a vector of the ...
317 rasterize_streams: Rasterizes vector streams based on Lindsay (2 ...
318 reciprocal: Returns the reciprocal (i.e. 1 / z) of values ...
319 reclass: Reclassifies the values in a raster image. ...
320 reclass_equal_interval: Reclassifies the values in a raster image bas ...
321 reclass_from_file: Reclassifies the values in a raster image usi ...
322 reinitialize_attribute_table: Reinitializes a vector's attribute table dele ...
323 related_circumscribing_circle: Calculates the related circumscribing circle ...
324 relative_aspect: Calculates relative aspect (relative to a use ...
325 relative_stream_power_index: Calculates the relative stream power index. ...
326 relative_topographic_position: Calculates the relative topographic position ...
327 remove_off_terrain_objects: Removes off-terrain objects from a raster dig ...
328 remove_polygon_holes: Removes holes within the features of a vector ...
329 remove_short_streams: Removes short first-order streams from a stre ...
330 remove_spurs: Removes the spurs (pruning operation) from a ...
331 resample: Resamples one or more input images into a des ...
332 rescale_value_range: Performs a min-max contrast stretch on an inp ...
333 rgb_to_ihs: Converts red, green, and blue (RGB) images in ...
334 rho8_pointer: Calculates a stochastic Rho8 flow pointer ras ...
335 roberts_cross_filter: Performs a Robert's cross edge-detection filt ...
336 root_mean_square_error: Calculates the RMSE and other accuracy statis ...
337 round: Rounds the values in an input raster to the n ...
338 ruggedness_index: Calculates the Riley et al.'s (1999) terrain ...
339 scharr_filter: Performs a Scharr edge-detection filter on an ...
340 sediment_transport_index: Calculates the sediment transport index. ...
341 select_tiles_by_polygon: Copies LiDAR tiles overlapping with a polygon ...
342 set_nodata_value: Assign a specified value in an input image to ...
343 shape_complexity_index: Calculates overall polygon shape complexity o ...
344 shape_complexity_index_raster: Calculates the complexity of raster polygons ...
345 shreve_stream_magnitude: Assigns the Shreve stream magnitude to each l ...
346 sigmoidal_contrast_stretch: Performs a sigmoidal contrast stretch on inpu ...
347 sin: Returns the sine (sin) of each values in a ra ...
348 single_part_to_multi_part: Converts a vector file containing multi-part ...
349 sinh: Returns the hyperbolic sine (sinh) of each va ...
350 sink: Identifies the depressions in a DEM, giving e ...
351 slope: Calculates a slope raster from an input DEM. ...
352 slope_vs_elevation_plot: Creates a slope vs. elevation plot for one or ...
353 smooth_vectors: Smooths a vector coverage of either a POLYLIN ...
354 snap_pour_points: Moves outlet points used to specify points of ...
355 sobel_filter: Performs a Sobel edge-detection filter on an ...
356 spherical_std_dev_of_normals: Calculates the spherical standard deviation o ...
357 split_colour_composite: This tool splits an RGB colour composite imag ...
358 split_with_lines: Splits the lines or polygons in one layer usi ...
359 square: Squares the values in a raster. ...
360 square_root: Returns the square root of the values in a ra ...
361 standard_deviation_contrast_stretch: Performs a standard-deviation contrast stretc ...
362 standard_deviation_filter: Assigns each cell in the output grid the stan ...
363 standard_deviation_of_slope: Calculates the standard deviation of slope fr ...
364 stochastic_depression_analysis: Preforms a stochastic analysis of depressions ...
365 strahler_order_basins: Identifies Strahler-order basins from an inpu ...
366 strahler_stream_order: Assigns the Strahler stream order to each lin ...
367 stream_link_class: Identifies the exterior/interior links and no ...
368 stream_link_identifier: Assigns a unique identifier to each link in a ...
369 stream_link_length: Estimates the length of each link (or tributa ...
370 stream_link_slope: Estimates the average slope of each link (or ...
371 stream_slope_continuous: Estimates the slope of each grid cell in a st ...
372 subbasins: Identifies the catchments, or sub-basin, drai ...
373 subtract: Performs a differencing operation on two rast ...
374 sum_overlay: Calculates the sum for each grid cell from a ...
375 surface_area_ratio: Calculates a the surface area ratio of each g ...
376 symmetrical_difference: Outputs the features that occur in one of the ...
377 tin_gridding: Creates a raster grid based on a triangular i ...
378 tan: Returns the tangent (tan) of each values in a ...
379 tangential_curvature: Calculates a tangential curvature raster from ...
380 tanh: Returns the hyperbolic tangent (tanh) of each ...
381 thicken_raster_line: Thickens single-cell wide lines within a rast ...
382 to_degrees: Converts a raster from radians to degrees. ...
383 to_radians: Converts a raster from degrees to radians. ...
384 tophat_transform: Performs either a white or black top-hat tran ...
385 topological_stream_order: Assigns each link in a stream network its top ...
386 total_curvature: Calculates a total curvature raster from an i ...
387 total_filter: Performs a total filter on an input image. ...
388 trace_downslope_flowpaths: Traces downslope flowpaths from one or more t ...
389 trend_surface: Estimates the trend surface of an input raste ...
390 trend_surface_vector_points: Estimates a trend surface from vector points. ...
391 tributary_identifier: Assigns a unique identifier to each tributary ...
392 truncate: Truncates the values in a raster to the desir ...
393 turning_bands_simulation: Creates an image containing random values bas ...
394 union: Splits vector layers at their overlaps, creat ...
395 unnest_basins: Extract whole watersheds for a set of outlet ...
396 unsharp_masking: An image sharpening technique that enhances e ...
397 user_defined_weights_filter: Performs a user-defined weights filter on an ...
398 vector_hex_binning: Hex-bins a set of vector points. ...
399 vector_lines_to_raster: Converts a vector containing polylines into a ...
400 vector_points_to_raster: Converts a vector containing points into a ra ...
401 vector_polygons_to_raster: Converts a vector containing polygons into a ...
402 viewshed: Identifies the viewshed for a point or set of ...
403 visibility_index: Estimates the relative visibility of sites in ...
404 voronoi_diagram: Creates a vector Voronoi diagram for a set of ...
405 watershed: Identifies the watershed, or drainage basin, ...
406 weighted_overlay: Performs a weighted sum on multiple input ras ...
407 weighted_sum: Performs a weighted-sum overlay on multiple i ...
408 wetness_index: Calculates the topographic wetness index, Ln( ...
409 write_function_memory_insertion: Performs a write function memory insertion fo ...
410 xor: Performs a logical XOR operator on two Boolea ...
411 z_scores: Standardizes the values in an input raster by ...
412 zonal_statistics: Extracts descriptive statistics for a group o ...
</code>
## Getting data_____no_output_____This section demonstrates two ways to get data into Binder so that you can test **whitebox** on the cloud using your own data.
* [Getting data from direct URLs](#Getting-data-from-direct-URLs)
* [Getting data from Google Drive](#Getting-data-from-Google-Drive)_____no_output_____### Getting data from direct URLs
If you have data hosted on your own HTTP server or GitHub, you should be able to get direct URLs. With a direct URL, users can automatically download the data when the URL is clicked. For example https://github.com/giswqs/whitebox/raw/master/examples/testdata.zip_____no_output_____Import the following Python libraries and start getting data from direct URLs._____no_output_____
<code>
import os
import zipfile
import tarfile
import shutil
import urllib.request_____no_output_____
</code>
Create a folder named *whitebox* under the user home folder and set it as the working directory._____no_output_____
<code>
work_dir = os.path.join(os.path.expanduser("~"), 'whitebox')
if not os.path.exists(work_dir):
os.mkdir(work_dir)
os.chdir(work_dir)
print("Working directory: {}".format(work_dir))Working directory: /home/qiusheng/whitebox
</code>
Replace the following URL with your own direct URL hosting your data._____no_output_____
<code>
url = "https://github.com/giswqs/whitebox/raw/master/examples/testdata.zip"_____no_output_____
</code>
Download the data from the above URL and unzip the file if needed._____no_output_____
<code>
# download the file
zip_name = os.path.basename(url)
zip_path = os.path.join(work_dir, zip_name)
print('Downloading {} ...'.format(zip_name))
urllib.request.urlretrieve(url, zip_path)
print('Downloading done.')
# if it is a zip file
if '.zip' in zip_name:
print("Decompressing {} ...".format(zip_name))
with zipfile.ZipFile(zip_name, "r") as zip_ref:
zip_ref.extractall(work_dir)
print('Decompressing done.')
# if it is a tar file
if '.tar' in zip_name:
print("Decompressing {} ...".format(zip_name))
with tarfile.open(zip_name, "r") as tar_ref:
tar_ref.extractall(work_dir)
print('Decompressing done.')
print('Data directory: {}'.format(os.path.splitext(zip_path)[0]))Downloading testdata.zip ...
Downloading done.
Decompressing testdata.zip ...
Decompressing done.
Data directory: /home/qiusheng/whitebox/testdata
</code>
You have successfully downloaded data to Binder. Therefore, you can skip to [Using whitebox](#Using-whitebox) and start testing whitebox with your own data. _____no_output_____### Getting data from Google Drive
Alternatively, you can upload data to [Google Drive](https://www.google.com/drive/) and then [share files publicly from Google Drive](https://support.google.com/drive/answer/2494822?co=GENIE.Platform%3DDesktop&hl=en). Once the file is shared publicly, you should be able to get a shareable URL. For example, https://drive.google.com/file/d/1xgxMLRh_jOLRNq-f3T_LXAaSuv9g_JnV.
To download files from Google Drive to Binder, you can use the Python package called [google-drive-downloader](https://github.com/ndrplz/google-drive-downloader), which can be installed using the following command:
`pip install googledrivedownloader requests`_____no_output_____**Replace the following URL with your own shareable URL from Google Drive.**_____no_output_____
<code>
gfile_url = 'https://drive.google.com/file/d/1xgxMLRh_jOLRNq-f3T_LXAaSuv9g_JnV'_____no_output_____
</code>
**Extract the file id from the above URL.**_____no_output_____
<code>
file_id = gfile_url.split('/')[5] #'1xgxMLRh_jOLRNq-f3T_LXAaSuv9g_JnV'
print('Google Drive file id: {}'.format(file_id))Google Drive file id: 1xgxMLRh_jOLRNq-f3T_LXAaSuv9g_JnV
</code>
**Download the shared file from Google Drive.**_____no_output_____
<code>
from google_drive_downloader import GoogleDriveDownloader as gdd
dest_path = './testdata.zip' # choose a name for the downloaded file
gdd.download_file_from_google_drive(file_id, dest_path, unzip=True)_____no_output_____
</code>
You have successfully downloaded data from Google Drive to Binder. You can now continue to [Using whitebox](#Using-whitebox) and start testing whitebox with your own data. _____no_output_____## Using whitebox_____no_output_____Here you can specify where your data are located. In this example, we will use [DEM.tif](https://github.com/giswqs/whitebox/blob/master/examples/testdata/DEM.tif), which has been downloaded to the testdata folder._____no_output_____**List data under the data folder.**_____no_output_____
<code>
data_dir = './testdata/'
print(os.listdir(data_dir))['breached_sink.tif', 'DEM.dep', 'smoothed.tif', 'DEM.tif.aux.xml', 'DEM.tif', 'breached_sink.tif.aux.xml']
</code>
In this simple example, we smooth [DEM.tif](https://github.com/giswqs/whitebox/blob/master/examples/testdata/DEM.tif) using a [feature preserving denoising](https://github.com/jblindsay/whitebox-tools/blob/master/src/tools/terrain_analysis/feature_preserving_denoise.rs) algorithm. Then, we remove depressions in the DEM using a [depression breaching](https://github.com/jblindsay/whitebox-tools/blob/master/src/tools/hydro_analysis/breach_depressions.rs) algorithm. Finally, we calculate [flow accumulation](https://github.com/jblindsay/whitebox-tools/blob/master/src/tools/hydro_analysis/dinf_flow_accum.rs) based on the depressionless DEM._____no_output_____
<code>
import whitebox
wbt = whitebox.WhiteboxTools()
# set whitebox working directory
wbt.set_working_dir(data_dir)
wbt.verbose = False
# call whiteboxtool
wbt.feature_preserving_smoothing("DEM.tif", "smoothed.tif", filter=9)
wbt.breach_depressions("smoothed.tif", "breached.tif")
wbt.d_inf_flow_accumulation("breached.tif", "flow_accum.tif")_____no_output_____
</code>
## Displaying results
This section demonstrates how to display images in a Jupyter notebook. Three Python packages are used here: [matplotlib](https://matplotlib.org/), [imageio](https://imageio.readthedocs.io/en/stable/installation.html), and [tifffile](https://pypi.org/project/tifffile/). These three packages can be installed using the following command:
`pip install matplotlib imageio tifffile`
_____no_output_____**Import the libraries.**_____no_output_____
<code>
# comment out the %matplotlib inline line below if you run the tutorial in IDEs other than Jupyter Notebook
import matplotlib.pyplot as plt
import imageio
%matplotlib inline _____no_output_____
</code>
**Display one single image.**_____no_output_____
<code>
raster = imageio.imread(os.path.join(data_dir, 'DEM.tif'))
plt.imshow(raster)
plt.show()_____no_output_____
</code>
**Read images as numpy arrays.**_____no_output_____
<code>
original = imageio.imread(os.path.join(data_dir, 'DEM.tif'))
smoothed = imageio.imread(os.path.join(data_dir, 'smoothed.tif'))
breached = imageio.imread(os.path.join(data_dir, 'breached.tif'))
flow_accum = imageio.imread(os.path.join(data_dir, 'flow_accum.tif'))_____no_output_____
</code>
**Display multiple images in one plot.**_____no_output_____
<code>
fig=plt.figure(figsize=(16,11))
ax1 = fig.add_subplot(2, 2, 1)
ax1.set_title('Original DEM')
plt.imshow(original)
ax2 = fig.add_subplot(2, 2, 2)
ax2.set_title('Smoothed DEM')
plt.imshow(smoothed)
ax3 = fig.add_subplot(2, 2, 3)
ax3.set_title('Breached DEM')
plt.imshow(breached)
ax4 = fig.add_subplot(2, 2, 4)
ax4.set_title('Flow Accumulation')
plt.imshow(flow_accum)
plt.show()_____no_output_____
</code>
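The `tifffile` package from the install command above is not exercised in the cells here. As an optional, minimal sketch (assuming the same `data_dir` and the `flow_accum.tif` output produced earlier), it can read a GeoTIFF into a numpy array in case `imageio` has trouble with a particular file:

```python
import os
import tifffile
import matplotlib.pyplot as plt

# read the flow accumulation raster produced by whitebox into a numpy array
flow_accum_tif = tifffile.imread(os.path.join(data_dir, 'flow_accum.tif'))
plt.imshow(flow_accum_tif)
plt.title('Flow Accumulation (read with tifffile)')
plt.show()
```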
## whitebox GUI
WhiteboxTools also provides a Graphical User Interface (GUI) - **WhiteboxTools Runner**, which can be invoked using the following Python script. *__Note that the GUI might not work in Jupyter notebooks deployed on the cloud (e.g., MyBinder.org), but it should work on Jupyter notebooks on local computers.__*
```python
import whitebox
whitebox.Runner()
```
_____no_output_____## Citing whitebox
If you use the **whitebox** Python package for your research and publications, please consider citing the following papers to give Prof. [John Lindsay](http://www.uoguelph.ca/~hydrogeo/index.html) credits for his tremendous efforts in developing [Whitebox GAT](https://github.com/jblindsay/whitebox-geospatial-analysis-tools) and [WhiteboxTools](https://github.com/jblindsay/whitebox-tools). Without his work, this **whitebox** Python package would not exist!
* Lindsay, J. B. (2016). Whitebox GAT: A case study in geomorphometric analysis. Computers & Geosciences, 95, 75-84. http://dx.doi.org/10.1016/j.cageo.2016.07.003_____no_output_____## Credits
This interactive notebook is made possible by [MyBinder.org](https://mybinder.org/). Big thanks to [MyBinder.org](https://mybinder.org/) for developing the amazing binder platform, which is extremely valuable for reproducible research!
This tutorial made use of a number of open-source Python packages, including [Cookiecutter](https://github.com/audreyr/cookiecutter), [numpy](http://www.numpy.org/), [matplotlib](https://matplotlib.org/), [imageio](https://imageio.readthedocs.io/en/stable/installation.html), [tifffile](https://pypi.org/project/tifffile/), and [google-drive-downloader](https://github.com/ndrplz/google-drive-downloader). Thanks to all developers of these wonderful Python packages!
_____no_output_____## Contact
If you have any questions regarding this tutorial or the **whitebox** Python package, you can contact me (Dr. Qiusheng Wu) at [email protected] or https://wetlands.io/#contact_____no_output_____
| {
"repository": "MarceloClaro/whitebox-python",
"path": "examples/whitebox.ipynb",
"matched_keywords": [
"single-cell"
],
"stars": 1,
"size": 374968,
"hexsha": "d0c287e030f7d4f768d1fd1a82000072d7febc08",
"max_line_length": 230608,
"avg_line_length": 288.4369230769,
"alphanum_fraction": 0.915574129
} |
# Notebook from slamb89/ml_training
Path: notebooks/Python-in-2-days/D1_L3_Python/__Python_Summary__.ipynb
# Introduction to Python_____no_output_____In this lesson we will learn the basics of the Python programming language (version 3). We won't learn everything about Python but enough to do some basic machine learning.
<img src="figures/python.png" width=350>
_____no_output_____# Variables_____no_output_____Variables are objects in Python that can hold anything, such as numbers or text. Let's look at how to create some variables._____no_output_____
<code>
# Numerical example
x = 99
print (x)
x = 27 # Added numerical value of 27
print (x)
x=55 # Added numerical value of 55
print (x)
"""changed numerical values of x"""99
27
55
# Text example
x = "learning to code is fun" # Changed text to "learning to code is fun" and "tomorrow"
print (x)
x="tomorrow"
print(x)
"""changed sentences and value of x. Modified spacing to see if it altered the output"""learning to code is fun
tomorrow
# Variables can be used with each other
a = 2 # Changed values of a, b, and c
b = 298
c = a + b
print (c)
a = 3
b = 4
c = 27
d = 22
e = a + b + c + d
print (e)
"""Changed values of a, b, and c. Created additional values for new variables."""300
56
</code>
Variables can come in lots of different types. Even within numerical variables, you can have integers (int), floats (float), etc. All text-based variables are of type string (str). We can see what type a variable is by printing its type._____no_output_____
<code>
# int variable
x = 2
print (x)
print (type(x))
x = 1
print (x)
print (type(x))
# float variable
x = 7.7
print (x)
print (type(x))
x = 2.25
print (x)
print (type(x))
# text variable
x = "hello Sheri"
print (x)
print (type(x))
x = "Thunderstorms"
print (x)
print (type(x))
# boolean variable
x = False
print (x)
print (type(x))
x = True
print (x)
print (type(x))
"""Created new values for the variable x"""2
<class 'int'>
1
<class 'int'>
7.7
<class 'float'>
2.25
<class 'float'>
hello Sheri
<class 'str'>
Thunderstorms
<class 'str'>
False
<class 'bool'>
True
<class 'bool'>
</code>
It's good practice to know what types your variables are. When you want to use numerical operations on them, they need to be compatible. _____no_output_____
<code>
# int variables
a = 6
b = 2
print (a + b)
# string variables
a = "6"
b = "2"
print (a + b)
a = "4"
b = "3"
c = "5"
print (a + b + c)
a = 4
b = 3
c = 5
print (a + b + c)
"""Changed existing value of int and string variables. Created new variables"""8
62
435
12
</code>
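When the types do not match, you can convert explicitly before combining values. This is a small extra sketch (the values here are just for illustration, not part of the original lesson):

```python
# convert strings to integers before doing arithmetic
a = "6"
b = "2"
print (int(a) + int(b)) # 8

# convert a number to a string before joining it with text
x = 99
print ("the value is " + str(x)) # the value is 99
```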
# Lists_____no_output_____Lists are objects in Python that can hold an ordered sequence of numbers **and** text._____no_output_____
<code>
# Creating a list
list_x = [2, "hello", 1]
print (list_x)
list_a = [1, "sheri lamb", 4]
print (list_a)
"""Created a new list a"""[2, 'hello', 1]
[1, 'sheri lamb', 4]
# Adding to a list
list_x.append(7)
print (list_x)
list_a.append("tomorrow")
print (list_a)
"""Added 'tomorrow' to my list"""[2, 'hello', 1, 7, 7, 7]
[1, 'sheri lamb', 4, 'tomorrow', 'tomorrow', 'tomorrow']
# Accessing items at specific location in a list
print ("list_x[0]: ", list_x[0])
print ("list_x[1]: ", list_x[1])
print ("list_x[2]: ", list_x[2])
print ("list_x[-1]: ", list_x[-1]) # the last item
print ("list_x[-2]: ", list_x[-2]) # the second to last item
print ("list_x[5]:", list_x[5])
"""accessed item #5"""list_x[0]: 1
list_x[1]: sheri lamb
list_x[2]: 4
list_x[-1]: tomorrow
list_x[-2]: 7
list_x[5]: 7
# Slicing
print ("list_x[:]: ", list_x[:])
print ("list_x[2:]: ", list_x[2:])
print ("list_x[1:3]: ", list_x[1:3])
print ("list_x[:-1]: ", list_x[:-1])
print ("list_x[5:]: ", list_x[5:])
"""added #5 to # slicing"""list_x[:]: [1, 'sheri lamb', 4, 7, 'tomorrow', 7, 'tomorrow', 7, 'tomorrow']
list_x[2:]: [4, 7, 'tomorrow', 7, 'tomorrow', 7, 'tomorrow']
list_x[1:3]: ['sheri lamb', 4]
list_x[:-1]: [1, 'sheri lamb', 4, 7, 'tomorrow', 7, 'tomorrow', 7]
list_x[5:]: [7, 'tomorrow', 7, 'tomorrow']
# Length of a list
len(list_x)
len(list_x)
len(list_a)
"""calculated the length of list_a"""_____no_output_____# Replacing items in a list
list_x[1] = "hi"
print (list_x)
list_a[1] = "yes"
print (list_a)
"""replaced item 1 with yes"""[2, 'hi', 1, 7, 7, 7]
[1, 'yes', 4, 'tomorrow', 'tomorrow', 'tomorrow']
# Combining lists
list_y = [2.4, "world"]
list_z = list_x + list_y
print (list_z)
list_h = [1, 2,"fire"]
list_i = [4, 7, "Stella"]
list_4 = list_h + list_i
print (list_4)
"""Created 2 new lists and combined them to create a third (list_4)"""[2, 'hi', 1, 7, 7, 7, 2.4, 'world']
[1, 2, 'fire', 4, 7, 'Stella']
</code>
# Tuples_____no_output_____Tuples are also objects in Python that can hold data but you cannot replace their values (for this reason, tuples are called immutable, whereas lists are known as mutable)._____no_output_____
<code>
# Creating a tuple
tuple_x = (3.0, "hello")
print (tuple_x)
tuple_y = (5.0, "Star")
print (tuple_y)
"""Created tuple y"""(3.0, 'hello')
(5.0, 'Star')
# Adding values to a tuple
tuple_x = tuple_x + (5.6,)
print (tuple_x)
tuple_z = tuple_y + (2.4,)
print (tuple_z)
"""added 2.4 to tuple_z"""(3.0, 'hello', 5.6, 5.6, 5.6, 5.6)
(5.0, 'Star', 2.4)
# Trying to change a tuple's value (you can't, this should produce an error.)
tuple_x[1] = "world"
tuple_z[1] = "sunrise"
"""attempted to change the value of tuple_z"""_____no_output_____
</code>
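Since tuples cannot be changed in place, a common workaround is to convert the tuple to a list, modify the list, and convert it back. This is an extra sketch, not part of the original lesson:

```python
# convert the tuple to a list, change an item, then convert back to a tuple
tuple_w = (3.0, "hello")
list_w = list(tuple_w)
list_w[1] = "world"
tuple_w = tuple(list_w)
print (tuple_w) # (3.0, 'world')
```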
# Dictionaries_____no_output_____Dictionaries are Python objects that hold key-value pairs. In the example dictionary below, the keys are the strings "name" and "eye_color". Each key has a value associated with it. A dictionary cannot have duplicate keys. _____no_output_____
<code>
# Creating a dictionary
dog = {"name": "dog",
"eye_color": "brown"}
print (dog)
print (dog["name"])
print (dog["eye_color"])
MAC = {"brand": "MAC", "color": "red"}
print (MAC)
print (MAC["brand"])
print (MAC["color"])
"""Created a dictionary for MAC lipstick"""
{'name': 'dog', 'eye_color': 'brown'}
dog
brown
{'brand': 'MAC', 'color': 'red'}
MAC
red
# Changing the value for a key
dog["eye_color"] = "green"
print (dog)
MAC["color"] = "pink"
print (MAC)
"""Changed the lipstick color from red to pink"""{'name': 'dog', 'eye_color': 'green'}
{'brand': 'MAC', 'color': 'pink'}
# Adding new key-value pairs
dog["age"] = 5
print (dog)
MAC["age"] = 1
print (MAC)
"""Added an aditional value (age)"""{'name': 'dog', 'eye_color': 'green', 'age': 5}
{'brand': 'MAC', 'color': 'pink', 'age': 1}
# Length of a dictionary
print (len(dog))
print (len(MAC))
"""Calculated length of MAC dictionary"""3
3
</code>
# If statements_____no_output_____You can use `if` statements to conditionally do something._____no_output_____
<code>
# If statement
x = 4
if x < 1:
score = "low"
elif x <= 4:
score = "medium"
else:
score = "high"
print (score)
x = 5
if x < 2:
score = "low"
elif x <= 5:
score = "medium"
else:
score = "high"
print (score)
x = 10
print (score)
x = 1
print (score)
"""Added additional if statements (x = 5)"""medium
medium
medium
medium
# If statment with a boolean
x = True
if x:
print ("it worked")
y = False
if y:
print ("it did not work")
z = True
if z:
print ("it almost worked")
"""Created true / false boolean statements"""it worked
it almost worked
</code>
# Loops_____no_output_____In Python, you can use a `for` loop to iterate over the elements of a sequence such as a list or tuple, or use a `while` loop to do something repeatedly as long as a condition holds._____no_output_____
<code>
# For loop
x = 2 # x variable will start at 2 instead of 1
for i in range(5): # goes from i=0 to i=4 range is 5 instead of 3
x += 1 # same as x = x + 1
print ("i={0}, x={1}".format(i, x)) # printing with multiple variables(
i=0, x=3
i=1, x=4
i=2, x=5
i=3, x=6
i=4, x=7
# Loop through items in a list
x = 2 # changed x variable to 2, now x will start at 3 instead of 2
for i in [0, 1, 2, 3, 4]: # added two additional numbers to the list
x += 1 # same as x = x +1
print ("i={0}, x={1}".format(i, x))i=0, x=3
i=1, x=4
i=2, x=5
i=3, x=6
i=4, x=7
# While loop
x = 10 # Changed variable from 3 to 10
while x > 3: # Changed the condition to 3
x -= 1 # same as x = x - 1
print (x)9
8
7
6
5
4
3
</code>
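Loops also work on the other collections covered above. A small extra sketch (not part of the original lesson) iterating over a list of strings and over a dictionary's key-value pairs:

```python
# for loop over a list of strings
for color in ["red", "green", "blue"]:
    print (color)

# for loop over the key-value pairs of a dictionary
pet = {"name": "dog", "age": 5}
for key, value in pet.items():
    print ("{0}: {1}".format(key, value))
```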
# Functions_____no_output_____Functions are a way to modularize reusable pieces of code. _____no_output_____
<code>
# Create a function
def Shamel(x): # Redefined function's name
x += 5 # Changed value of x
return x
# Use the function
score = 1
score = Shamel(x=score)
print (score)6
# Function with multiple inputs
def join_name (first_name, middle_name, last_name): # Re-defined function
joined_name = first_name + " " + middle_name + " " + last_name # Added middle name
return joined_name
# Use the function
first_name = "Sheri" # Change display information
middle_name = "Nicole"
last_name = "Lamb"
joined_name = join_name(first_name=first_name, middle_name=middle_name, last_name=last_name)
print (joined_name)Sheri Nicole Lamb
</code>
# Classes_____no_output_____Classes are a fundamental piece of object-oriented programming in Python._____no_output_____
<code>
# Creating the class
class Cars(object): # Changed class to Cars
# Initialize the class
def __init__(self, brand, color, name): # Changed "species" to "brand"
self.brand = brand
self.color = color
self.name = name
# For printing
def __str__(self):
return "{0} {1} named {2}.".format(self.color, self.brand, self.name)
# Example function
def change_name(self, new_name):
self.name = new_name_____no_output_____# Creating an instance of a class
my_car = Cars(brand="Jeep", color="Spitfire Orange", name="Rover",) # Changed instances of car class
print (my_car)
print (my_car.name)Spitfire Orange Jeep named Rover.
Rover
# Using a class's function
my_car.change_name(new_name="Sunshine") # Changes cars name
print (my_car)
print (my_car.name)Spitfire Orange Jeep named Sunshine.
Sunshine
</code>
# Additional resources_____no_output_____This was a very quick look at Python and we'll be learning more in future lessons. If you want to learn more right now before diving into machine learning, check out this free course: [Free Python Course](https://www.codecademy.com/learn/learn-python)_____no_output_____
| {
"repository": "slamb89/ml_training",
"path": "notebooks/Python-in-2-days/D1_L3_Python/__Python_Summary__.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 33650,
"hexsha": "d0c3bddbf194a03addc5ce6ddc092986084000c6",
"max_line_length": 898,
"avg_line_length": 22.4183877415,
"alphanum_fraction": 0.4837741456
} |
# Notebook from bijanh/NLP-Proj-1
Path: HMM Tagger.ipynb
# Project: Part of Speech Tagging with Hidden Markov Models
---
### Introduction
Part of speech tagging is the process of determining the syntactic category of a word from the words in its surrounding context. It is often used to help disambiguate natural language phrases because it can be done quickly with high accuracy. Tagging can be used for many NLP tasks like determining correct pronunciation during speech synthesis (for example, _dis_-count as a noun vs dis-_count_ as a verb), for information retrieval, and for word sense disambiguation.
In this notebook, you'll use the [Pomegranate](http://pomegranate.readthedocs.io/) library to build a hidden Markov model for part of speech tagging using a "universal" tagset. Hidden Markov models have been able to achieve [>96% tag accuracy with larger tagsets on realistic text corpora](http://www.coli.uni-saarland.de/~thorsten/publications/Brants-ANLP00.pdf). Hidden Markov models have also been used for speech recognition and speech generation, machine translation, gene recognition for bioinformatics, and human gesture recognition for computer vision, and more.

The notebook already contains some code to get you started. You only need to add some new functionality in the areas indicated to complete the project; you will not need to modify the included code beyond what is requested. Sections that begin with **'IMPLEMENTATION'** in the header indicate that you must provide code in the block that follows. Instructions will be provided for each section, and the specifics of the implementation are marked in the code block with a 'TODO' statement. Please be sure to read the instructions carefully!_____no_output_____<div class="alert alert-block alert-info">
**Note:** Once you have completed all of the code implementations, you need to finalize your work by exporting the iPython Notebook as an HTML document. Before exporting the notebook to html, all of the code cells need to have been run so that reviewers can see the final implementation and output. You must then **export the notebook** by running the last cell in the notebook, or by using the menu above and navigating to **File -> Download as -> HTML (.html)** Your submissions should include both the `html` and `ipynb` files.
</div>_____no_output_____<div class="alert alert-block alert-info">
**Note:** Code and Markdown cells can be executed using the `Shift + Enter` keyboard shortcut. Markdown cells can be edited by double-clicking the cell to enter edit mode.
</div>_____no_output_____### The Road Ahead
You must complete Steps 1-3 below to pass the project. The section on Step 4 includes references & resources you can use to further explore HMM taggers.
- [Step 1](#Step-1:-Read-and-preprocess-the-dataset): Review the provided interface to load and access the text corpus
- [Step 2](#Step-2:-Build-a-Most-Frequent-Class-tagger): Build a Most Frequent Class tagger to use as a baseline
- [Step 3](#Step-3:-Build-an-HMM-tagger): Build an HMM Part of Speech tagger and compare to the MFC baseline
- [Step 4](#Step-4:-[Optional]-Improving-model-performance): (Optional) Improve the HMM tagger_____no_output_____<div class="alert alert-block alert-warning">
**Note:** Make sure you have selected a **Python 3** kernel in Workspaces or the hmm-tagger conda environment if you are running the Jupyter server on your own machine.
</div>_____no_output_____
<code>
# Jupyter "magic methods" -- only need to be run once per kernel restart
%load_ext autoreload
%aimport helpers, tests
%autoreload 1_____no_output_____# import python modules -- this cell needs to be run again if you make changes to any of the files
import matplotlib.pyplot as plt
import numpy as np
from IPython.core.display import HTML
from itertools import chain
from collections import Counter, defaultdict
from helpers import show_model, Dataset
from pomegranate import State, HiddenMarkovModel, DiscreteDistribution_____no_output_____
</code>
## Step 1: Read and preprocess the dataset
---
We'll start by reading in a text corpus and splitting it into a training and testing dataset. The data set is a copy of the [Brown corpus](https://en.wikipedia.org/wiki/Brown_Corpus) (originally from the [NLTK](https://www.nltk.org/) library) that has already been pre-processed to only include the [universal tagset](https://arxiv.org/pdf/1104.2086.pdf). You should expect to get slightly higher accuracy using this simplified tagset than the same model would achieve on a larger tagset like the full [Penn treebank tagset](https://www.ling.upenn.edu/courses/Fall_2003/ling001/penn_treebank_pos.html), but the process you'll follow would be the same.
The `Dataset` class provided in helpers.py will read and parse the corpus. You can generate your own datasets compatible with the reader by writing them to the following format. The dataset is stored in plaintext as a collection of words and corresponding tags. Each sentence starts with a unique identifier on the first line, followed by one tab-separated word/tag pair on each following line. Sentences are separated by a single blank line.
Example from the Brown corpus.
```
b100-38532
Perhaps ADV
it PRON
was VERB
right ADJ
; .
; .
b100-35577
...
```_____no_output_____
<code>
data = Dataset("tags-universal.txt", "brown-universal.txt", train_test_split=0.8)
print("There are {} sentences in the corpus.".format(len(data)))
print("There are {} sentences in the training set.".format(len(data.training_set)))
print("There are {} sentences in the testing set.".format(len(data.testing_set)))
assert len(data) == len(data.training_set) + len(data.testing_set), \
"The number of sentences in the training set + testing set should sum to the number of sentences in the corpus"There are 57340 sentences in the corpus.
There are 45872 sentences in the training set.
There are 11468 sentences in the testing set.
</code>
### The Dataset Interface
You can access (mostly) immutable references to the dataset through a simple interface provided through the `Dataset` class, which represents an iterable collection of sentences along with easy access to partitions of the data for training & testing. Review the reference below, then run and review the next few cells to make sure you understand the interface before moving on to the next step.
```
Dataset-only Attributes:
training_set - reference to a Subset object containing the samples for training
testing_set - reference to a Subset object containing the samples for testing
Dataset & Subset Attributes:
sentences - a dictionary with an entry {sentence_key: Sentence()} for each sentence in the corpus
keys - an immutable ordered (not sorted) collection of the sentence_keys for the corpus
vocab - an immutable collection of the unique words in the corpus
tagset - an immutable collection of the unique tags in the corpus
X - returns an array of words grouped by sentences ((w11, w12, w13, ...), (w21, w22, w23, ...), ...)
Y - returns an array of tags grouped by sentences ((t11, t12, t13, ...), (t21, t22, t23, ...), ...)
N - returns the number of distinct samples (individual words or tags) in the dataset
Methods:
    stream() - returns a flat iterable over all (word, tag) pairs across all sentences in the corpus
    __iter__() - returns an iterable over the data as (sentence_key, Sentence()) pairs
    __len__() - returns the number of sentences in the dataset
```
For example, consider a Subset, `subset`, of the sentences `{"s0": Sentence(("See", "Spot", "run"), ("VERB", "NOUN", "VERB")), "s1": Sentence(("Spot", "ran"), ("NOUN", "VERB"))}`. The subset will have these attributes:
```
subset.keys == {"s1", "s0"} # unordered
subset.vocab == {"See", "run", "ran", "Spot"} # unordered
subset.tagset == {"VERB", "NOUN"} # unordered
subset.X == (("Spot", "ran"), ("See", "Spot", "run")) # order matches .keys
subset.Y == (("NOUN", "VERB"), ("VERB", "NOUN", "VERB")) # order matches .keys
subset.N == 7 # there are a total of seven observations over all sentences
len(subset) == 2 # because there are two sentences
```
<div class="alert alert-block alert-info">
**Note:** The `Dataset` class is _convenient_, but it is **not** efficient. It is not suitable for huge datasets because it stores multiple redundant copies of the same data.
</div>_____no_output_____#### Sentences
`Dataset.sentences` is a dictionary of all sentences in the training corpus, each keyed to a unique sentence identifier. Each `Sentence` is itself an object with two attributes: a tuple of the words in the sentence named `words` and a tuple of the tag corresponding to each word named `tags`._____no_output_____
<code>
key = 'b100-38532'
print("Sentence: {}".format(key))
print("words:\n\t{!s}".format(data.sentences[key].words))
print("tags:\n\t{!s}".format(data.sentences[key].tags))Sentence: b100-38532
words:
('Perhaps', 'it', 'was', 'right', ';', ';')
tags:
('ADV', 'PRON', 'VERB', 'ADJ', '.', '.')
</code>
<div class="alert alert-block alert-info">
**Note:** The underlying iterable sequence is **unordered** over the sentences in the corpus; it is not guaranteed to return the sentences in a consistent order between calls. Use `Dataset.stream()`, `Dataset.keys`, `Dataset.X`, or `Dataset.Y` attributes if you need ordered access to the data.
</div>
#### Counting Unique Elements
You can access the list of unique words (the dataset vocabulary) via `Dataset.vocab` and the unique list of tags via `Dataset.tagset`._____no_output_____
<code>
print("There are a total of {} samples of {} unique words in the corpus."
.format(data.N, len(data.vocab)))
print("There are {} samples of {} unique words in the training set."
.format(data.training_set.N, len(data.training_set.vocab)))
print("There are {} samples of {} unique words in the testing set."
.format(data.testing_set.N, len(data.testing_set.vocab)))
print("There are {} words in the test set that are missing in the training set."
.format(len(data.testing_set.vocab - data.training_set.vocab)))
assert data.N == data.training_set.N + data.testing_set.N, \
"The number of training + test samples should sum to the total number of samples"There are a total of 1161192 samples of 56057 unique words in the corpus.
There are 928458 samples of 50536 unique words in the training set.
There are 232734 samples of 25112 unique words in the testing set.
There are 5521 words in the test set that are missing in the training set.
</code>
#### Accessing word and tag Sequences
The `Dataset.X` and `Dataset.Y` attributes provide access to ordered collections of matching word and tag sequences for each sentence in the dataset._____no_output_____
<code>
# accessing words with Dataset.X and tags with Dataset.Y
for i in range(2):
print("Sentence {}:".format(i + 1), data.X[i])
print()
print("Labels {}:".format(i + 1), data.Y[i])
print()Sentence 1: ('Mr.', 'Podger', 'had', 'thanked', 'him', 'gravely', ',', 'and', 'now', 'he', 'made', 'use', 'of', 'the', 'advice', '.')
Labels 1: ('NOUN', 'NOUN', 'VERB', 'VERB', 'PRON', 'ADV', '.', 'CONJ', 'ADV', 'PRON', 'VERB', 'NOUN', 'ADP', 'DET', 'NOUN', '.')
Sentence 2: ('But', 'there', 'seemed', 'to', 'be', 'some', 'difference', 'of', 'opinion', 'as', 'to', 'how', 'far', 'the', 'board', 'should', 'go', ',', 'and', 'whose', 'advice', 'it', 'should', 'follow', '.')
Labels 2: ('CONJ', 'PRT', 'VERB', 'PRT', 'VERB', 'DET', 'NOUN', 'ADP', 'NOUN', 'ADP', 'ADP', 'ADV', 'ADV', 'DET', 'NOUN', 'VERB', 'VERB', '.', 'CONJ', 'DET', 'NOUN', 'PRON', 'VERB', 'VERB', '.')
</code>
#### Accessing (word, tag) Samples
The `Dataset.stream()` method returns an iterator that chains together every pair of (word, tag) entries across all sentences in the entire corpus._____no_output_____
<code>
# use Dataset.stream() (word, tag) samples for the entire corpus
print("\nStream (word, tag) pairs:\n")
for i, pair in enumerate(data.stream()):
print("\t", pair)
if i > 5: break
Stream (word, tag) pairs:
('Mr.', 'NOUN')
('Podger', 'NOUN')
('had', 'VERB')
('thanked', 'VERB')
('him', 'PRON')
('gravely', 'ADV')
(',', '.')
</code>
For both our baseline tagger and the HMM model we'll build, we need to estimate the frequency of tags & words from the observation counts in the training corpus. In the next several cells you will complete functions to compute several sets of these frequency counts. _____no_output_____## Step 2: Build a Most Frequent Class tagger
---
Perhaps the simplest tagger (and a good baseline for tagger performance) is to simply choose the tag most frequently assigned to each word. This "most frequent class" tagger inspects each observed word in the sequence and assigns it the label that was most often assigned to that word in the corpus._____no_output_____### IMPLEMENTATION: Pair Counts
Complete the function below that computes the joint frequency counts for two input sequences._____no_output_____
<code>
def pair_counts(sequences_A, sequences_B):
"""Return a dictionary keyed to each unique value in the first sequence list
that counts the number of occurrences of the corresponding value from the
second sequences list.
For example, if sequences_A is tags and sequences_B is the corresponding
words, then if 1244 sequences contain the word "time" tagged as a NOUN, then
you should return a dictionary such that pair_counts[NOUN][time] == 1244
"""
    # TODO: Finish this function!
    counts = {}
    # walk the two sequence lists in parallel and tally co-occurrences of (A, B)
    for seq_A, seq_B in zip(sequences_A, sequences_B):
        for element_A, element_B in zip(seq_A, seq_B):
            if element_A not in counts:
                counts[element_A] = {}
            counts[element_A][element_B] = counts[element_A].get(element_B, 0) + 1
    return counts
# Calculate C(t_i, w_i)
emission_counts = pair_counts(data.Y, data.X)
assert len(emission_counts) == 12, \
"Uh oh. There should be 12 tags in your dictionary."
assert max(emission_counts["NOUN"], key=emission_counts["NOUN"].get) == 'time', \
"Hmmm...'time' is expected to be the most common NOUN."
HTML('<div class="alert alert-block alert-success">Your emission counts look good!</div>')_____no_output_____
</code>
### IMPLEMENTATION: Most Frequent Class Tagger
Use the `pair_counts()` function and the training dataset to find the most frequent class label for each word in the training data, and populate the `mfc_table` below. The table keys should be words, and the values should be the appropriate tag string.
The `MFCTagger` class is provided to mock the interface of Pomegranite HMM models so that they can be used interchangeably._____no_output_____
<code>
# Create a lookup table mfc_table where mfc_table[word] contains the tag label most frequently assigned to that word
from collections import namedtuple
FakeState = namedtuple("FakeState", "name")
class MFCTagger:
# NOTE: You should not need to modify this class or any of its methods
missing = FakeState(name="<MISSING>")
def __init__(self, table):
self.table = defaultdict(lambda: MFCTagger.missing)
self.table.update({word: FakeState(name=tag) for word, tag in table.items()})
def viterbi(self, seq):
"""This method simplifies predictions by matching the Pomegranate viterbi() interface"""
return 0., list(enumerate(["<start>"] + [self.table[w] for w in seq] + ["<end>"]))
# TODO: calculate the frequency of each tag being assigned to each word (hint: similar, but not
# the same as the emission probabilities) and use it to fill the mfc_table
word_counts = pair_counts(data.X, data.Y)
mfc_table = {}
for word in data.training_set.vocab:
mfc_table[word] = max(word_counts[word], key=word_counts[word].get)
# DO NOT MODIFY BELOW THIS LINE
mfc_model = MFCTagger(mfc_table) # Create a Most Frequent Class tagger instance
assert len(mfc_table) == len(data.training_set.vocab), ""
assert all(k in data.training_set.vocab for k in mfc_table.keys()), ""
assert sum(int(k not in mfc_table) for k in data.testing_set.vocab) == 5521, ""
HTML('<div class="alert alert-block alert-success">Your MFC tagger has all the correct words!</div>')_____no_output_____
</code>
### Making Predictions with a Model
The helper functions provided below interface with Pomegranate network models & the mocked MFCTagger to take advantage of the [missing value](http://pomegranate.readthedocs.io/en/latest/nan.html) functionality in Pomegranate through a simple sequence decoding function. Run these functions, then run the next cell to see some of the predictions made by the MFC tagger._____no_output_____
<code>
def replace_unknown(sequence):
"""Return a copy of the input sequence where each unknown word is replaced
by the literal string value 'nan'. Pomegranate will ignore these values
during computation.
"""
return [w if w in data.training_set.vocab else 'nan' for w in sequence]
def simplify_decoding(X, model):
"""X should be a 1-D sequence of observations for the model to predict"""
_, state_path = model.viterbi(replace_unknown(X))
return [state[1].name for state in state_path[1:-1]] # do not show the start/end state predictions_____no_output_____
</code>
### Example Decoding Sequences with MFC Tagger_____no_output_____
<code>
for key in data.testing_set.keys[:3]:
print("Sentence Key: {}\n".format(key))
print("Predicted labels:\n-----------------")
print(simplify_decoding(data.sentences[key].words, mfc_model))
print()
print("Actual labels:\n--------------")
print(data.sentences[key].tags)
print("\n")Sentence Key: b100-28144
Predicted labels:
-----------------
['CONJ', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'CONJ', 'NOUN', 'NUM', '.', '.', 'NOUN', '.', '.']
Actual labels:
--------------
('CONJ', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'CONJ', 'NOUN', 'NUM', '.', '.', 'NOUN', '.', '.')
Sentence Key: b100-23146
Predicted labels:
-----------------
['PRON', 'VERB', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', 'NOUN', 'VERB', 'VERB', '.', 'ADP', 'VERB', 'DET', 'NOUN', 'ADP', 'NOUN', 'ADP', 'DET', 'NOUN', '.']
Actual labels:
--------------
('PRON', 'VERB', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', 'NOUN', 'VERB', 'VERB', '.', 'ADP', 'VERB', 'DET', 'NOUN', 'ADP', 'NOUN', 'ADP', 'DET', 'NOUN', '.')
Sentence Key: b100-35462
Predicted labels:
-----------------
['DET', 'ADJ', 'NOUN', 'VERB', 'VERB', 'VERB', 'ADP', 'DET', 'ADJ', 'ADJ', 'NOUN', 'ADP', 'DET', 'ADJ', 'NOUN', '.', 'ADP', 'ADJ', 'NOUN', '.', 'CONJ', 'ADP', 'DET', '<MISSING>', 'ADP', 'ADJ', 'ADJ', '.', 'ADJ', '.', 'CONJ', 'ADJ', 'NOUN', 'ADP', 'ADV', 'NOUN', '.']
Actual labels:
--------------
('DET', 'ADJ', 'NOUN', 'VERB', 'VERB', 'VERB', 'ADP', 'DET', 'ADJ', 'ADJ', 'NOUN', 'ADP', 'DET', 'ADJ', 'NOUN', '.', 'ADP', 'ADJ', 'NOUN', '.', 'CONJ', 'ADP', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', '.', 'ADJ', '.', 'CONJ', 'ADJ', 'NOUN', 'ADP', 'ADJ', 'NOUN', '.')
</code>
### Evaluating Model Accuracy
The function below will evaluate the accuracy of the MFC tagger on the collection of all sentences from a text corpus. _____no_output_____
<code>
def accuracy(X, Y, model):
"""Calculate the prediction accuracy by using the model to decode each sequence
in the input X and comparing the prediction with the true labels in Y.
The X should be an array whose first dimension is the number of sentences to test,
and each element of the array should be an iterable of the words in the sequence.
The arrays X and Y should have the exact same shape.
X = [("See", "Spot", "run"), ("Run", "Spot", "run", "fast"), ...]
Y = [(), (), ...]
"""
correct = total_predictions = 0
for observations, actual_tags in zip(X, Y):
# The model.viterbi call in simplify_decoding will return None if the HMM
# raises an error (for example, if a test sentence contains a word that
# is out of vocabulary for the training set). Any exception counts the
# full sentence as an error (which makes this a conservative estimate).
try:
most_likely_tags = simplify_decoding(observations, model)
correct += sum(p == t for p, t in zip(most_likely_tags, actual_tags))
except:
pass
total_predictions += len(observations)
return correct / total_predictions_____no_output_____
</code>
#### Evaluate the accuracy of the MFC tagger
Run the next cell to evaluate the accuracy of the tagger on the training and test corpus._____no_output_____
<code>
mfc_training_acc = accuracy(data.training_set.X, data.training_set.Y, mfc_model)
print("training accuracy mfc_model: {:.2f}%".format(100 * mfc_training_acc))
mfc_testing_acc = accuracy(data.testing_set.X, data.testing_set.Y, mfc_model)
print("testing accuracy mfc_model: {:.2f}%".format(100 * mfc_testing_acc))
assert mfc_training_acc >= 0.955, "Uh oh. Your MFC accuracy on the training set doesn't look right."
assert mfc_testing_acc >= 0.925, "Uh oh. Your MFC accuracy on the testing set doesn't look right."
HTML('<div class="alert alert-block alert-success">Your MFC tagger accuracy looks correct!</div>')training accuracy mfc_model: 95.71%
testing accuracy mfc_model: 93.13%
</code>
## Step 3: Build an HMM tagger
---
The HMM tagger has one hidden state for each possible tag, and is parameterized by two distributions: the emission probabilities giving the conditional probability of observing a given **word** from each hidden state, and the transition probabilities giving the conditional probability of moving between **tags** during the sequence.
We will also estimate the starting probability distribution (the probability of each **tag** being the first tag in a sequence), and the terminal probability distribution (the probability of each **tag** being the last tag in a sequence).
The maximum likelihood estimate of these distributions can be calculated from the frequency counts as described in the following sections where you'll implement functions to count the frequencies, and finally build the model. The HMM model will make predictions according to the formula:
$$\hat{t}_1^n = \underset{t_1^n}{\mathrm{argmax}} \prod_{i=1}^n P(w_i|t_i) P(t_i|t_{i-1})$$
Refer to Speech & Language Processing [Chapter 10](https://web.stanford.edu/~jurafsky/slp3/10.pdf) for more information._____no_output_____### IMPLEMENTATION: Unigram Counts
Complete the function below to estimate the co-occurrence frequency of each symbol over all of the input sequences. The unigram probabilities in our HMM model are estimated from the formula below, where N is the total number of samples in the input. (You only need to compute the counts for now.)
$$P(tag_1) = \frac{C(tag_1)}{N}$$_____no_output_____
<code>
def unigram_counts(sequences):
"""Return a dictionary keyed to each unique value in the input sequence list that
counts the number of occurrences of the value in the sequences list. The sequences
collection should be a 2-dimensional array.
For example, if the tag NOUN appears 275558 times over all the input sequences,
then you should return a dictionary such that your_unigram_counts[NOUN] == 275558.
"""
# TODO: Finish this function!
my_unigram_counts = {}
for tag in sequences:
if tag in my_unigram_counts:
my_unigram_counts[tag] += 1
else:
my_unigram_counts[tag] = 1
# Easier method: return Counter(sequences)
return my_unigram_counts
# TODO: call unigram_counts with a list of tag sequences from the training set
tags = [tag for word, tag in data.stream()]
tag_unigrams = unigram_counts(tags) # TODO: YOUR CODE HERE
assert set(tag_unigrams.keys()) == data.training_set.tagset, \
"Uh oh. It looks like your tag counts doesn't include all the tags!"
assert min(tag_unigrams, key=tag_unigrams.get) == 'X', \
"Hmmm...'X' is expected to be the least common class"
assert max(tag_unigrams, key=tag_unigrams.get) == 'NOUN', \
"Hmmm...'NOUN' is expected to be the most common class"
HTML('<div class="alert alert-block alert-success">Your tag unigrams look good!</div>')_____no_output_____
</code>
### IMPLEMENTATION: Bigram Counts
Complete the function below to estimate the co-occurrence frequency of each pair of symbols in each of the input sequences. These counts are used in the HMM model to estimate the bigram probability of two tags from the frequency counts according to the formula: $$P(tag_2|tag_1) = \frac{C(tag_1, tag_2)}{C(tag_1)}$$
_____no_output_____
<code>
import itertools
def pairwise(iterable):
t, t_1 = itertools.tee(iterable)
next(t_1, 'end')
return zip(t, t_1)
def bigram_counts(sequences):
"""Return a dictionary keyed to each unique PAIR of values in the input sequences
list that counts the number of occurrences of pair in the sequences list. The input
should be a 2-dimensional array.
For example, if the pair of tags (NOUN, VERB) appear 61582 times, then you should
return a dictionary such that your_bigram_counts[(NOUN, VERB)] == 61582
"""
# TODO: Finish this function!
prior = ''
my_bigram_counts = {}
for tag in sequences:
if prior != '':
if (prior, tag) in my_bigram_counts:
my_bigram_counts[prior, tag] += 1
else:
my_bigram_counts[prior, tag] = 1
prior = tag
# Easier method: return dict(Counter(pairwise(sequences)))
return my_bigram_counts
# TODO: call bigram_counts with a list of tag sequences from the training set
tags = [tag for word, tag in data.stream()]
tag_bigrams = bigram_counts(tags)
assert len(tag_bigrams) == 144, \
"Uh oh. There should be 144 pairs of bigrams (12 tags x 12 tags)"
assert min(tag_bigrams, key=tag_bigrams.get) in [('X', 'NUM'), ('PRON', 'X')], \
"Hmmm...The least common bigram should be one of ('X', 'NUM') or ('PRON', 'X')."
assert max(tag_bigrams, key=tag_bigrams.get) in [('DET', 'NOUN')], \
"Hmmm...('DET', 'NOUN') is expected to be the most common bigram."
HTML('<div class="alert alert-block alert-success">Your tag bigrams look good!</div>')_____no_output_____
</code>
### IMPLEMENTATION: Sequence Starting Counts
Complete the code below to estimate the bigram probabilities of a sequence starting with each tag._____no_output_____
<code>
def starting_counts(sequences):
"""Return a dictionary keyed to each unique value in the input sequences list
that counts the number of occurrences where that value is at the beginning of
a sequence.
For example, if 8093 sequences start with NOUN, then you should return a
dictionary such that your_starting_counts[NOUN] == 8093
"""
# TODO: Finish this function!
my_start_counts = {}
for start, end in sequences:
count = sequences[start, end]
if start in my_start_counts:
my_start_counts[start] += count
else:
my_start_counts[start] = count
return my_start_counts
# TODO: Calculate the count of each tag starting a sequence
tag_starts = starting_counts(tag_bigrams)
assert len(tag_starts) == 12, "Uh oh. There should be 12 tags in your dictionary."
assert min(tag_starts, key=tag_starts.get) == 'X', "Hmmm...'X' is expected to be the least common starting bigram."
assert max(tag_starts, key=tag_starts.get) != 'DET', "Hmmm...'DET' is expected to be the most common starting bigram."
HTML('<div class="alert alert-block alert-success">Your starting tag counts look good!</div>')_____no_output_____
</code>
### IMPLEMENTATION: Sequence Ending Counts
Complete the function below to estimate the bigram probabilities of a sequence ending with each tag._____no_output_____
<code>
def ending_counts(sequences):
"""Return a dictionary keyed to each unique value in the input sequences list
that counts the number of occurrences where that value is at the end of
a sequence.
For example, if 18 sequences end with DET, then you should return a
dictionary such that your_starting_counts[DET] == 18
"""
# TODO: Finish this function!
my_end_counts = {}
for start, end in sequences:
count = sequences[start, end]
if end in my_end_counts:
my_end_counts[end] += count
else:
my_end_counts[end] = count
return my_end_counts
# TODO: Calculate the count of each tag ending a sequence
tag_ends = ending_counts(tag_bigrams)
assert len(tag_ends) == 12, "Uh oh. There should be 12 tags in your dictionary."
assert min(tag_ends, key=tag_ends.get) in ['X', 'CONJ'], "Hmmm...'X' or 'CONJ' should be the least common ending bigram."
assert max(tag_ends, key=tag_ends.get) != '.', "Hmmm...'.' is expected to be the most common ending bigram."
HTML('<div class="alert alert-block alert-success">Your ending tag counts look good!</div>')_____no_output_____
</code>
### IMPLEMENTATION: Basic HMM Tagger
Use the tag unigrams and bigrams calculated above to construct a hidden Markov tagger.
- Add one state per tag
- The emission distribution at each state should be estimated with the formula: $P(w|t) = \frac{C(t, w)}{C(t)}$
- Add an edge from the starting state `basic_model.start` to each tag
- The transition probability should be estimated with the formula: $P(t|start) = \frac{C(start, t)}{C(start)}$
- Add an edge from each tag to the end state `basic_model.end`
- The transition probability should be estimated with the formula: $P(end|t) = \frac{C(t, end)}{C(t)}$
- Add an edge between _every_ pair of tags
- The transition probability should be estimated with the formula: $P(t_2|t_1) = \frac{C(t_1, t_2)}{C(t_1)}$_____no_output_____
<code>
basic_model = HiddenMarkovModel(name="base-hmm-tagger")
# TODO: create states with emission probability distributions P(word | tag) and add to the model
# (Hint: you may need to loop & create/add new states)
tags = [tag for word, tag in data.stream()]
words = [word for word, tag in data.stream()]
tags_count = unigram_counts(tags)
tag_words_count = pair_counts([tags], [words])
states = []
for tag, words_dict in tag_words_count.items():
total = float(sum(words_dict.values()))
distribution = {word: count/total for word, count in words_dict.items()}
tag_emissions = DiscreteDistribution(distribution)
tag_state = State(tag_emissions, name=tag)
states.append(tag_state)
basic_model.add_states(states)
# TODO: add edges between states for the observed transition frequencies P(tag_i | tag_i-1)
# (Hint: you may need to loop & add transitions
transition_prob_pair = {}
for key in tag_bigrams.keys():
transition_prob_pair[key] = tag_bigrams.get(key)/tags_count[key[0]]
for tag_state in states :
for next_tag_state in states :
basic_model.add_transition(tag_state,next_tag_state,transition_prob_pair[(tag_state.name,next_tag_state.name)])
starting_tag_count = starting_counts(tag_bigrams)  # the number of times a tag occurred at the start
ending_tag_count = ending_counts(tag_bigrams)  # the number of times a tag occurred at the end
start_prob = {}
for tag in tags:
start_prob[tag] = starting_tag_count[tag]/tags_count[tag]
for tag_state in states :
basic_model.add_transition(basic_model.start, tag_state, start_prob[tag_state.name])
end_prob = {}
for tag in tags:
end_prob[tag] = ending_tag_count[tag]/tags_count[tag]
for tag_state in states:
basic_model.add_transition(tag_state, basic_model.end, end_prob[tag_state.name])
# NOTE: YOU SHOULD NOT NEED TO MODIFY ANYTHING BELOW THIS LINE
# finalize the model
basic_model.bake()
assert all(tag in set(s.name for s in basic_model.states) for tag in data.training_set.tagset), \
"Every state in your network should use the name of the associated tag, which must be one of the training set tags."
assert basic_model.edge_count() == 168, \
("Your network should have an edge from the start node to each state, one edge between every " +
"pair of tags (states), and an edge from each state to the end node.")
HTML('<div class="alert alert-block alert-success">Your HMM network topology looks good!</div>')_____no_output_____hmm_training_acc = accuracy(data.training_set.X, data.training_set.Y, basic_model)
print("training accuracy basic hmm model: {:.2f}%".format(100 * hmm_training_acc))
hmm_testing_acc = accuracy(data.testing_set.X, data.testing_set.Y, basic_model)
print("testing accuracy basic hmm model: {:.2f}%".format(100 * hmm_testing_acc))
assert hmm_training_acc > 0.97, "Uh oh. Your HMM accuracy on the training set doesn't look right."
assert hmm_testing_acc > 0.955, "Uh oh. Your HMM accuracy on the testing set doesn't look right."
HTML('<div class="alert alert-block alert-success">Your HMM tagger accuracy looks correct! Congratulations, you\'ve finished the project.</div>')training accuracy basic hmm model: 97.51%
testing accuracy basic hmm model: 96.14%
</code>
### Example Decoding Sequences with the HMM Tagger_____no_output_____
<code>
for key in data.testing_set.keys[:3]:
print("Sentence Key: {}\n".format(key))
print("Predicted labels:\n-----------------")
print(simplify_decoding(data.sentences[key].words, basic_model))
print()
print("Actual labels:\n--------------")
print(data.sentences[key].tags)
print("\n")Sentence Key: b100-28144
Predicted labels:
-----------------
['CONJ', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'CONJ', 'NOUN', 'NUM', '.', '.', 'NOUN', '.', '.']
Actual labels:
--------------
('CONJ', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'NOUN', 'NUM', '.', 'CONJ', 'NOUN', 'NUM', '.', '.', 'NOUN', '.', '.')
Sentence Key: b100-23146
Predicted labels:
-----------------
['PRON', 'VERB', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', 'NOUN', 'VERB', 'VERB', '.', 'ADP', 'VERB', 'DET', 'NOUN', 'ADP', 'NOUN', 'ADP', 'DET', 'NOUN', '.']
Actual labels:
--------------
('PRON', 'VERB', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', 'NOUN', 'VERB', 'VERB', '.', 'ADP', 'VERB', 'DET', 'NOUN', 'ADP', 'NOUN', 'ADP', 'DET', 'NOUN', '.')
Sentence Key: b100-35462
Predicted labels:
-----------------
['DET', 'ADJ', 'NOUN', 'VERB', 'VERB', 'VERB', 'ADP', 'DET', 'ADJ', 'ADJ', 'NOUN', 'ADP', 'DET', 'ADJ', 'NOUN', '.', 'ADP', 'ADJ', 'NOUN', '.', 'CONJ', 'ADP', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', '.', 'ADJ', '.', 'CONJ', 'ADJ', 'NOUN', 'ADP', 'ADJ', 'NOUN', '.']
Actual labels:
--------------
('DET', 'ADJ', 'NOUN', 'VERB', 'VERB', 'VERB', 'ADP', 'DET', 'ADJ', 'ADJ', 'NOUN', 'ADP', 'DET', 'ADJ', 'NOUN', '.', 'ADP', 'ADJ', 'NOUN', '.', 'CONJ', 'ADP', 'DET', 'NOUN', 'ADP', 'ADJ', 'ADJ', '.', 'ADJ', '.', 'CONJ', 'ADJ', 'NOUN', 'ADP', 'ADJ', 'NOUN', '.')
</code>
## Finishing the project
---
<div class="alert alert-block alert-info">
**Note:** **SAVE YOUR NOTEBOOK**, then run the next cell to generate an HTML copy. You will zip & submit both this file and the HTML copy for review.
</div>_____no_output_____
<code>
!!jupyter nbconvert *.ipynb_____no_output_____
</code>
## Step 4: [Optional] Improving model performance
---
There are additional enhancements that can be incorporated into your tagger that improve performance on larger tagsets where the data sparsity problem is more significant. The data sparsity problem arises because the same amount of data split over more tags means there will be fewer samples in each tag, and there will be more missing data tags that have zero occurrences in the data. The techniques in this section are optional.
- [Laplace Smoothing](https://en.wikipedia.org/wiki/Additive_smoothing) (pseudocounts)
Laplace smoothing is a technique where you add a small, non-zero value to all observed counts to offset for unobserved values. (A minimal sketch of this idea is shown after this list.)
- Backoff Smoothing
Another smoothing technique is to interpolate between n-grams for missing data. This method is more effective than Laplace smoothing at combatting the data sparsity problem. Refer to chapters 4, 9, and 10 of the [Speech & Language Processing](https://web.stanford.edu/~jurafsky/slp3/) book for more information.
- Extending to Trigrams
HMM taggers have achieved better than 96% accuracy on this dataset with the full Penn treebank tagset using an architecture described in [this](http://www.coli.uni-saarland.de/~thorsten/publications/Brants-ANLP00.pdf) paper. Altering your HMM to achieve the same performance would require implementing deleted interpolation (described in the paper), incorporating trigram probabilities in your frequency tables, and re-implementing the Viterbi algorithm to consider three consecutive states instead of two.
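As a concrete illustration of the first bullet above, here is a minimal Laplace (add-k) smoothing sketch for the emission distributions. It reuses `pair_counts` and the training set from earlier in the notebook; the pseudocount value `k` and the variable names are illustrative choices rather than part of the project template.

```python
# Laplace (add-k) smoothing sketch for the emission distributions
k = 0.01  # pseudocount; an illustrative value that would normally be tuned

training_emission_counts = pair_counts(data.training_set.Y, data.training_set.X)
vocab = data.training_set.vocab

smoothed_emissions = {}
for tag, word_counts in training_emission_counts.items():
    total = sum(word_counts.values()) + k * len(vocab)
    # every word in the training vocabulary now gets a small, non-zero probability
    smoothed_emissions[tag] = {word: (word_counts.get(word, 0) + k) / total
                               for word in vocab}
```

These smoothed dictionaries could then replace the raw frequency-based `distribution` dictionaries when constructing the `DiscreteDistribution` states in Step 3.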
### Obtain the Brown Corpus with a Larger Tagset
Run the code below to download a copy of the Brown corpus with the full NLTK tagset. You will need to research the available tagset information in the NLTK docs and determine the best way to extract the subset of NLTK tags you want to explore. If you write the output following the format specified in Step 1, then you can reload the data using all of the code above for comparison.
Refer to [Chapter 5](http://www.nltk.org/book/ch05.html) of the NLTK book for more information on the available tagsets._____no_output_____
<code>
import nltk
from nltk import pos_tag, word_tokenize
from nltk.corpus import brown
nltk.download('brown')
training_corpus = nltk.corpus.brown
training_corpus.tagged_sents()[0]_____no_output_____
</code>
| {
"repository": "bijanh/NLP-Proj-1",
"path": "HMM Tagger.ipynb",
"matched_keywords": [
"bioinformatics"
],
"stars": null,
"size": 52837,
"hexsha": "d0c8252c5de62f8e4a05b85e098f2c214569858a",
"max_line_length": 660,
"avg_line_length": 42.16839585,
"alphanum_fraction": 0.5839279293
} |
# Notebook from oonid/growth-hacking-with-nlp-sentiment-analysis
Path: create_dataset.ipynb
<a href="https://colab.research.google.com/github/oonid/growth-hacking-with-nlp-sentiment-analysis/blob/master/create_dataset.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____# Evaluate Amazon Video Games Review Dataset_____no_output_____
<code>
# ndjson to handle newline delimited json
!pip install ndjson
# update imbalanced-learn lib on colab
!pip install --upgrade imbalanced-learnRequirement already satisfied: ndjson in /usr/local/lib/python3.6/dist-packages (0.3.1)
Requirement already up-to-date: imbalanced-learn in /usr/local/lib/python3.6/dist-packages (0.6.2)
Requirement already satisfied, skipping upgrade: scipy>=0.17 in /usr/local/lib/python3.6/dist-packages (from imbalanced-learn) (1.4.1)
Requirement already satisfied, skipping upgrade: joblib>=0.11 in /usr/local/lib/python3.6/dist-packages (from imbalanced-learn) (0.15.1)
Requirement already satisfied, skipping upgrade: numpy>=1.11 in /usr/local/lib/python3.6/dist-packages (from imbalanced-learn) (1.18.4)
Requirement already satisfied, skipping upgrade: scikit-learn>=0.22 in /usr/local/lib/python3.6/dist-packages (from imbalanced-learn) (0.22.2.post1)
# all imports and related
%matplotlib inline
import pandas as pd
import numpy as np
import altair as alt
import ndjson
from collections import Counter
from imblearn.under_sampling import RandomUnderSampler
Using TensorFlow backend.
# get dataset, extract from gzip (overwrite), and preview data on file
!wget http://deepyeti.ucsd.edu/jianmo/amazon/categoryFilesSmall/Video_Games_5.json.gz
!yes y | gunzip Video_Games_5.json.gz
!head Video_Games_5.json--2020-05-29 14:17:40-- http://deepyeti.ucsd.edu/jianmo/amazon/categoryFilesSmall/Video_Games_5.json.gz
Resolving deepyeti.ucsd.edu (deepyeti.ucsd.edu)... 169.228.63.50
Connecting to deepyeti.ucsd.edu (deepyeti.ucsd.edu)|169.228.63.50|:80... connected.
HTTP request sent, awaiting response... 200 OK
Length: 154050105 (147M) [application/octet-stream]
Saving to: ‘Video_Games_5.json.gz’
Video_Games_5.json. 100%[===================>] 146.91M 43.6MB/s in 3.7s
2020-05-29 14:17:44 (39.2 MB/s) - ‘Video_Games_5.json.gz’ saved [154050105/154050105]
{"overall": 5.0, "verified": true, "reviewTime": "10 17, 2015", "reviewerID": "A1HP7NVNPFMA4N", "asin": "0700026657", "reviewerName": "Ambrosia075", "reviewText": "This game is a bit hard to get the hang of, but when you do it's great.", "summary": "but when you do it's great.", "unixReviewTime": 1445040000}
{"overall": 4.0, "verified": false, "reviewTime": "07 27, 2015", "reviewerID": "A1JGAP0185YJI6", "asin": "0700026657", "reviewerName": "travis", "reviewText": "I played it a while but it was alright. The steam was a bit of trouble. The more they move these game to steam the more of a hard time I have activating and playing a game. But in spite of that it was fun, I liked it. Now I am looking forward to anno 2205 I really want to play my way to the moon.", "summary": "But in spite of that it was fun, I liked it", "unixReviewTime": 1437955200}
{"overall": 3.0, "verified": true, "reviewTime": "02 23, 2015", "reviewerID": "A1YJWEXHQBWK2B", "asin": "0700026657", "reviewerName": "Vincent G. Mezera", "reviewText": "ok game.", "summary": "Three Stars", "unixReviewTime": 1424649600}
{"overall": 2.0, "verified": true, "reviewTime": "02 20, 2015", "reviewerID": "A2204E1TH211HT", "asin": "0700026657", "reviewerName": "Grandma KR", "reviewText": "found the game a bit too complicated, not what I expected after having played 1602, 1503, and 1701", "summary": "Two Stars", "unixReviewTime": 1424390400}
{"overall": 5.0, "verified": true, "reviewTime": "12 25, 2014", "reviewerID": "A2RF5B5H74JLPE", "asin": "0700026657", "reviewerName": "jon", "reviewText": "great game, I love it and have played it since its arrived", "summary": "love this game", "unixReviewTime": 1419465600}
{"overall": 4.0, "verified": true, "reviewTime": "11 13, 2014", "reviewerID": "A11V6ZJ2FVQY1D", "asin": "0700026657", "reviewerName": "IBRAHIM ALBADI", "reviewText": "i liked a lot some time that i haven't play a wonderfull game very simply and funny game verry good game.", "summary": "Anno 2070", "unixReviewTime": 1415836800}
{"overall": 1.0, "verified": false, "reviewTime": "08 2, 2014", "reviewerID": "A1KXJ1ELZIU05C", "asin": "0700026657", "reviewerName": "Creation27", "reviewText": "I'm an avid gamer, but Anno 2070 is an INSULT to gaming. It is so buggy and half-finished that the first campaign doesn't even work properly and the DRM is INCREDIBLY frustrating to deal with.\n\nOnce you manage to work your way past the massive amounts of bugs and get through the DRM, HOURS later you finally figure out that the game has no real tutorial, so you stuck just clicking around randomly.\n\nSad, sad, sad, example of a game that could have been great but FTW.", "summary": "Avoid This Game - Filled with Bugs", "unixReviewTime": 1406937600}
{"overall": 5.0, "verified": true, "reviewTime": "03 3, 2014", "reviewerID": "A1WK5I4874S3O2", "asin": "0700026657", "reviewerName": "WhiteSkull", "reviewText": "I bought this game thinking it would be pretty cool and that i might play it for a week or two and be done. Boy was I wrong! From the moment I finally got the gamed Fired up (the other commentors on this are right, it takes forever and u are forced to create an account) I watched as it booted up I could tell right off the bat that ALOT of thought went into making this game. If you have ever played Sim city, then this game is a must try as you will easily navigate thru it and its multi layers. I have been playing htis now for a month straight, and I am STILL discovering layers of complexity in the game. There are a few things in the game that could used tweaked, but all in all this is a 5 star game.", "summary": "A very good game balance of skill with depth of choices", "unixReviewTime": 1393804800}
{"overall": 5.0, "verified": true, "reviewTime": "02 21, 2014", "reviewerID": "AV969NA4CBP10", "asin": "0700026657", "reviewerName": "Travis B. Moore", "reviewText": "I have played the old anno 1701 AND 1503. this game looks great but is more complex than the previous versions of the game. I found a lot of things lacking such as the sources of power and an inability to store energy with batteries or regenertive fuel cells as buildings in the game need power. Trade is about the same. My main beef with this it requires an internet connection. Other than that it has wonderful artistry and graphics. It is the same as anno 1701 but set in a future world where global warmming as flood the land and resource scarcity has sent human kind to look to the deep ocean for valuable minerals. I recoment the deep ocean expansion or complete if you get this. I found the ai instructor a little corny but other than that the game has some real polish. I wrote my 2 cents worth on suggestions on anno 2070 wiki and you can read 3 pages on that for game ideas I had.", "summary": "Anno 2070 more like anno 1701", "unixReviewTime": 1392940800}
{"overall": 4.0, "verified": true, "reviewTime": "06 27, 2013", "reviewerID": "A1EO9BFUHTGWKZ", "asin": "0700026657", "reviewerName": "johnnyz3", "reviewText": "I liked it and had fun with it, played for a while and got my money's worth. You can certainly go further than I did but I got frustrated with the fact that here we are in this new start and still taking from the earth rather than living with it. Better than simcity in that respect and maybe the best we could hope for.", "summary": "Pretty fun", "unixReviewTime": 1372291200}
# load from file-like objects
with open('Video_Games_5.json') as f:
vg5 = ndjson.load(f)
print('data loaded as {} with len {}'.format(type(vg5), len(vg5)))
# sample out 2 data
vg5[:2]data loaded as <class 'list'> with len 497577
# load list of dict as panda DataFrame
df = pd.DataFrame(vg5)
df.head()_____no_output_____# describe to understand values of column overall (next as ratings)
df.describe()_____no_output_____# create copy of DataFrame with overall as index, to prepare plotting
dfo = df.set_index('overall')
dfo.head()_____no_output_____# group data by column overall (currently as index) and count the variants
dfo.groupby(dfo.index).count()_____no_output_____# plot grouped data by overall related to column reviewText (next as reviews)
dfo.groupby(dfo.index)['reviewText'].count().plot(kind='bar')_____no_output_____# add altair chart based on sample solutions
rating_counts = Counter(df.overall.tolist())
chart_data = pd.DataFrame(
{'ratings': [str(e) for e in list(rating_counts.keys())],
'counts': list(rating_counts.values())})
chart = alt.Chart(chart_data).mark_bar().encode(x="ratings", y="counts")
chart_____no_output_____# dataset with only two columns (overall, reviewText) as numpy array
X = df[['overall', 'reviewText']].to_numpy()
print('dataset X shape: {} type: {}'.format(X.shape, type(X)))
# using column overall as label
y = df['overall'].to_numpy()
print('label y shape: {} type: {}'.format(y.shape, type(y)))
dataset X shape: (497577, 2) type: <class 'numpy.ndarray'>
label y shape: (497577,) type: <class 'numpy.ndarray'>
</code>
# Generating small_corpus_____no_output_____
<code>
# predefined sampling strategy
sampling_strategy = {1.0: 1500, 2.0: 500, 3.0: 500, 4.0: 500, 5.0: 1500}
random_state = 42 # to get identical results with sample solution
rus = RandomUnderSampler(random_state=random_state,
sampling_strategy=sampling_strategy)
X_res, y_res = rus.fit_resample(X, y)
print('initial label: {}'.format(Counter(y)))
print('result label: {}'.format(Counter(y_res)))initial label: Counter({5.0: 299759, 4.0: 93654, 3.0: 49146, 1.0: 30883, 2.0: 24135})
result label: Counter({1.0: 1500, 5.0: 1500, 2.0: 500, 3.0: 500, 4.0: 500})
# convert from numpy array back to pandas DataFrame
small_corpus = pd.DataFrame({'ratings': X_res[:, 0], 'reviews': X_res[:, 1]})
# set ratings column type as int32
small_corpus['ratings'] = small_corpus['ratings'].astype('int32')
# get info of small_corpus DataFrame with total 1500+500+500+500+1500 entries
small_corpus.info()
small_corpus.head()<class 'pandas.core.frame.DataFrame'>
RangeIndex: 4500 entries, 0 to 4499
Data columns (total 2 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 ratings 4500 non-null int32
1 reviews 4496 non-null object
dtypes: int32(1), object(1)
memory usage: 52.9+ KB
# export small_corpus to csv (1500+500+500+500+1500), without index
small_corpus.to_csv('small_corpus.csv', index=False)_____no_output_____
</code>
# Generating big_corpus_____no_output_____
<code>
random_state = 42 # to get identical results with sample solution
np.random.seed(random_state)
# draw 100,000 random ratings (1-5) as a numpy array
random_ratings = np.random.randint(low=1, high=6, size=100000)_____no_output_____# create sampling strategy by count total ratings on random_ratings (dataframe)
unique, counts = np.unique(random_ratings, return_counts=True)
sampling_strategy = {}
for k, v in zip(unique, counts):
sampling_strategy[k] = v
print('sampling_strategy: {}'.format(sampling_strategy))sampling_strategy: {1: 20018, 2: 20082, 3: 19732, 4: 19981, 5: 20187}
rus = RandomUnderSampler(random_state=random_state,
sampling_strategy=sampling_strategy)
X_res, y_res = rus.fit_resample(X, y)
print('initial label: {}'.format(Counter(y)))
print('result label: {}'.format(Counter(y_res)))initial label: Counter({5.0: 299759, 4.0: 93654, 3.0: 49146, 1.0: 30883, 2.0: 24135})
result label: Counter({5.0: 20187, 2.0: 20082, 1.0: 20018, 4.0: 19981, 3.0: 19732})
# convert from numpy array back to pandas DataFrame
big_corpus = pd.DataFrame({'ratings': X_res[:, 0], 'reviews': X_res[:, 1]})
# set ratings column type as int32
big_corpus['ratings'] = big_corpus['ratings'].astype('int32')
big_corpus.info()
big_corpus.head()<class 'pandas.core.frame.DataFrame'>
RangeIndex: 100000 entries, 0 to 99999
Data columns (total 2 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 ratings 100000 non-null int32
1 reviews 99985 non-null object
dtypes: int32(1), object(1)
memory usage: 1.1+ MB
# export big_corpus to csv (100000)
big_corpus.to_csv('big_corpus.csv')_____no_output__________no_output_____
</code>
| {
"repository": "oonid/growth-hacking-with-nlp-sentiment-analysis",
"path": "create_dataset.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 62541,
"hexsha": "d0c83cc900b32359da68d8f1a319e1b2f220477a",
"max_line_length": 7686,
"avg_line_length": 46.2923760178,
"alphanum_fraction": 0.4758478438
} |
# Notebook from arodriguezca/NLP-dataset
Path: bert_embed_seq2seq.ipynb
<code>
#Libraries
import warnings
warnings.filterwarnings('ignore')
import pandas as pd
import numpy as np
import os
import re
import json
import string
import matplotlib.pyplot as plt
%matplotlib inline
import plotly.express as px
import plotly.graph_objects as go
from tqdm.autonotebook import tqdm
from functools import partial
import torch
import random
from sklearn.model_selection import train_test_split
!pip install transformers
from transformers import BertTokenizer, BertModel
#import spacyCollecting transformers
[?25l Downloading https://files.pythonhosted.org/packages/d8/b2/57495b5309f09fa501866e225c84532d1fd89536ea62406b2181933fb418/transformers-4.5.1-py3-none-any.whl (2.1MB)
[K |████████████████████████████████| 2.1MB 9.7MB/s
[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (1.19.5)
Collecting tokenizers<0.11,>=0.10.1
[?25l Downloading https://files.pythonhosted.org/packages/ae/04/5b870f26a858552025a62f1649c20d29d2672c02ff3c3fb4c688ca46467a/tokenizers-0.10.2-cp37-cp37m-manylinux2010_x86_64.whl (3.3MB)
[K |████████████████████████████████| 3.3MB 52.3MB/s
[?25hRequirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from transformers) (3.0.12)
Collecting sacremoses
[?25l Downloading https://files.pythonhosted.org/packages/75/ee/67241dc87f266093c533a2d4d3d69438e57d7a90abb216fa076e7d475d4a/sacremoses-0.0.45-py3-none-any.whl (895kB)
[K |████████████████████████████████| 901kB 54.9MB/s
[?25hRequirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from transformers) (2.23.0)
Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from transformers) (20.9)
Requirement already satisfied: importlib-metadata; python_version < "3.8" in /usr/local/lib/python3.7/dist-packages (from transformers) (3.10.1)
Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.7/dist-packages (from transformers) (4.41.1)
Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (2019.12.20)
Requirement already satisfied: click in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (7.1.2)
Requirement already satisfied: joblib in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (1.0.1)
Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (1.15.0)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2020.12.5)
Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (3.0.4)
Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2.10)
Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (1.24.3)
Requirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging->transformers) (2.4.7)
Requirement already satisfied: typing-extensions>=3.6.4; python_version < "3.8" in /usr/local/lib/python3.7/dist-packages (from importlib-metadata; python_version < "3.8"->transformers) (3.7.4.3)
Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata; python_version < "3.8"->transformers) (3.4.1)
Installing collected packages: tokenizers, sacremoses, transformers
Successfully installed sacremoses-0.0.45 tokenizers-0.10.2 transformers-4.5.1
gpu_info = !nvidia-smi
gpu_info = '\n'.join(gpu_info)
if gpu_info.find('failed') >= 0:
print('Select the Runtime > "Change runtime type" menu to enable a GPU accelerator, ')
print('and then re-execute this cell.')
else:
print(gpu_info)
print(f'GPU available: {torch.cuda.is_available()}')
random.seed(10)Tue May 4 15:27:55 2021
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 465.19.01 Driver Version: 460.32.03 CUDA Version: 11.2 |
|-------------------------------+----------------------+----------------------+
| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|===============================+======================+======================|
| 0 Tesla P100-PCIE... Off | 00000000:00:04.0 Off | 0 |
| N/A 35C P0 26W / 250W | 0MiB / 16280MiB | 0% Default |
| | | N/A |
+-------------------------------+----------------------+----------------------+
+-----------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=============================================================================|
| No running processes found |
+-----------------------------------------------------------------------------+
GPU available: True
print(torch.cuda.is_available())
if torch.cuda.is_available():
device = torch.device("cuda")
else:
device = torch.device("cpu")
print("Using device:", device)True
Using device: cuda
</code>
## Vocabulary
This is useful only for the decoder; we get the vocab from the complete data_____no_output_____
<code>
df = pd.read_csv("data.csv")
df = df.sample(frac=1, random_state=100).reset_index(drop=True)
df.head()
# df = df.iloc[0:10,:]
text = []
for i in range(len(df)):
t = df.loc[i][6]
text.append((t, df.loc[i][5]))_____no_output_____df.head()_____no_output_____pad_word = "<pad>"
bos_word = "<s>"
eos_word = "</s>"
unk_word = "<unk>"
pad_id = 0
bos_id = 1
eos_id = 2
unk_id = 3
def normalize_sentence(s):
s = re.sub(r"([.!?])", r" \1", s)
s = re.sub(r"[^a-zA-Z.!?]+", r" ", s)
s = re.sub(r"\s+", r" ", s).strip()
return s
class Vocabulary:
def __init__(self):
self.word_to_id = {pad_word: pad_id, bos_word: bos_id, eos_word:eos_id, unk_word: unk_id}
self.word_count = {}
self.id_to_word = {pad_id: pad_word, bos_id: bos_word, eos_id: eos_word, unk_id: unk_word}
self.num_words = 4
def get_ids_from_sentence(self, sentence):
sentence = normalize_sentence(sentence)
sent_ids = [bos_id] + [self.word_to_id[word] if word in self.word_to_id \
else unk_id for word in sentence.split()] + \
[eos_id]
return sent_ids
def tokenized_sentence(self, sentence):
sent_ids = self.get_ids_from_sentence(sentence)
return [self.id_to_word[word_id] for word_id in sent_ids]
def decode_sentence_from_ids(self, sent_ids):
words = list()
for i, word_id in enumerate(sent_ids):
if word_id in [bos_id, eos_id, pad_id]:
# Skip these words
continue
else:
words.append(self.id_to_word[word_id])
return ' '.join(words)
def add_words_from_sentence(self, sentence):
sentence = normalize_sentence(sentence)
for word in sentence.split():
if word not in self.word_to_id:
# add this word to the vocabulary
self.word_to_id[word] = self.num_words
self.id_to_word[self.num_words] = word
self.word_count[word] = 1
self.num_words += 1
else:
# update the word count
self.word_count[word] += 1
vocab = Vocabulary()
for src, tgt in text:
vocab.add_words_from_sentence(src)
vocab.add_words_from_sentence(tgt)
print(f"Total words in the vocabulary = {vocab.num_words}")Total words in the vocabulary = 56347
</code>
## Create chunks for each publication_____no_output_____
<code>
# Every publication input will be mapped into a variable number of chunks (split by sentence), each shorter than chunk_max_len words
# These can then be batched by encoding strings, then padding them
chunk_max_len = 512
publication_ids = df['Id']
dataset_label = df['cleaned_label']
chunked_text = [[]] * len(df.index) # publication id x chunks - left in string format for flexibility in encoding
chunk_labels = [[]] * len(df.index) # publication id x chunk - if label in chunk, True else False
for i in range(len(df.index)):
chunked_text[i] = []
chunk_labels[i] = []
chunk = ''
for s in df['text'][i].split('.'):
# print(s)
new_chunk = chunk + s.strip()
if len(s)>0 and s[-1]!='.':
new_chunk += '. '
if len(new_chunk.split(' ')) > chunk_max_len:
# labels_per_chunk[i].append(True if df['dataset_label'][i] in chunk else False)
chunk_labels[i].append(1 if df['dataset_label'][i] in chunk else 0)
chunked_text[i].append(chunk)
chunk = s
else:
chunk = new_chunk
# labels_per_chunk[i].append(True if df['dataset_label'][i] in chunk else False)
chunk_labels[i].append(1 if df['dataset_label'][i] in chunk else 0)
chunked_text[i].append(chunk)
print(len(chunked_text[0]), chunked_text[0])
print(dataset_label[0])4 ["In its original form, the amyloid cascade hypothesis of Alzheimer's disease holds that fibrillar deposits of amyloid are an early, driving force in pathological events leading ultimately to neuronal death. Early clinicopathologic investigations highlighted a number of inconsistencies leading to an updated hypothesis in which amyloid plaques give way to amyloid oligomers as the driving force in pathogenesis. Rather than focusing on the inconsistencies, amyloid imaging studies have tended to highlight the overlap between regions that show early amyloid plaque signal on positron emission tomography and that also happen to be affected early in Alzheimer's disease. Recent imaging studies investigating the regional dependency between metabolism and amyloid plaque deposition have arrived at conflicting results, with some showing regional associations and other not. We extracted multimodal neuroimaging data from the Alzheimer's disease neuroimaging database for 227 healthy controls and 434 subjects with mild cognitive impairment. We analyzed regional patterns of amyloid deposition, regional glucose metabolism and regional atrophy using florbetapir ( 18 F) positron emission tomography, 18 F-fuordeoxyglucose positron emission tomography and T1 weighted magnetic resonance imaging, respectively. Specifically, we derived gray matter density and standardized uptake value ratios for both positron emission tomography tracers in 404 functionally defined regions of interest. We examined the relation between regional glucose metabolism and amyloid plaques using linear models. For each region of interest, correcting for regional gray matter density, age, education and disease status, we tested the association of regional glucose metabolism with (i) cortex-wide florbetapir uptake, (ii) regional (i. e. , in the same region of interest) florbetapir uptake and (iii) regional florbetapir uptake while correcting in addition for cortex-wide florbetapir uptake. P-values for each setting were Bonferroni corrected for 404 tests. Regions showing significant hypometabolism with increasing cortex-wide amyloid burden were classic Alzheimer's disease-related regions: the medial and lateral parietal cortices. The associations between regional amyloid burden and regional metabolism were more heterogeneous: there were significant hypometabolic effects in posterior cingulate, precuneus, and parietal regions but also significant positive associations in bilateral hippocampus and entorhinal cortex. However, after correcting for global amyloid burden, very few of the negative associations remained and the number of positive associations increased. Given the wide-spread distribution of amyloid plaques, if the canonical cascade hypothesis were true, we would expect wide-spread, cortical hypometabolism. Instead, cortical hypometabolism appears to be linked to global amyloid burden. Thus we conclude that regional fibrillar amyloid deposition has little to no association with regional hypometabolism. The amyloid cascade hypothesis of Alzheimer's disease, in its original, unmodified form, posits that the protein amyloid-β is the starting point for a series of pathogenic changes that lead from neuronal dysfunction and synapse loss to cell death (Hardy and Allsop, 1991; Hardy and Higgins, 1992). Particular weight is given, in the unmodified version of the hypothesis, to the large fibrillar aggregates of amyloid-β known as amyloid plaques. 
The link between amyloid-β, in some form, and Alzheimer's disease is unassailable. ", " Disease-causing mutations in the three genes that lead to autosomal dominant Alzheimer's disease have been shown to promote the formation of the putatively neurotoxic form of amyloid-β, a peptide of 42 amino acids (Suzuki et al, 1994; Scheuner et al. , 1996; Gomez-Isla et al. , 1999). While amyloid-β is, irrefutably, an initiating factor in Alzheimer's disease pathogenesis, the remainder of the amyloid cascade hypothesis is much less firmly established. Amyloid plaques are, along with tau-based neurofibrillary tangles, one of the pathologic hallmarks of Alzheimer's disease (Braak and Braak, 1991). They are large, abundant, and easily seen with basic microscopy stains and, as such, were initially assumed to have a key role in the pathogenic cascade (Hardy and Higgins, 1992). From the earliest days of clinicopathologic investigations, however, a number of glaring inconsistencies arose. Chief among these is the oft-replicated finding that there is little association between where amyloid plaques are found at autopsy and which brain regions were dysfunctional in the patient's clinical course (Price et al. , 1991; Arriagada et al. , 1992; Giannakopoulos et al. , 1997; Hardy and Selkoe, 2002). This discordance is most obvious in the entorhinal cortex and hippocampus. These medial temporal lobe structures, crucial to episodic memory function, are the first to fail clinically and the first to develop neurofibrillary tangle pathology. Amyloid plaque deposition, however, does not occur in these regions until relatively late in the course (Price et al. , 1991; Arriagada et al. , 1992; Giannakopoulos et al. , 1997). Conversely, other regions, like the medial prefrontal cortex, typically show abundant amyloid plaque pathology at autopsy despite being relatively functionally spared clinically (Price et al. , 1991; Arriagada et al. , 1992; Giannakopoulos et al. , 1997). As the field wrestled with these inconsistencies, evidence began to accrue suggesting that Aβ was still the key driver but that its pathogenic properties were related to smaller soluble aggregates of the peptide referred to as oligomers (Lambert et al. , 1998; Hartley et al. , 1999). These findings have allowed for an updated, reconciled version of the amyloid cascade hypothesis in which amyloid plaques give way to amyloid oligomers as the driving force in pathogenesis (Hardy and Selkoe, 2002). The advent of amyloid PET imaging should have reinforced this update to the hypothesis. The correlation between plaque quantity and distribution as measured with PET and plaque quantity and distribution at autopsy is extraordinarily high (Ikonomovic et al. , 2008; Hatsuta et al. , 2015). Unsurprisingly, therefore, imaging studies of Alzheimer's began to show many of the same patterns that the neuropathology literature had been documenting for the last several decades. After age 70, roughly 25% of healthy older controls without cognitive complaints or deficits on testing harbor a large burden of amyloid plaques on PET imaging (Rowe et al. , 2010; Chetelat et al. , 2013; Jack et al. , 2014). ", " The medial prefrontal cortex is among the first regions to show high signal on amyloid PET scans in healthy older controls despite remaining clinically unaffected even late into the course of Alzheimer's disease (Jack et al, 2008). 
Conversely, even late into the course of Alzheimer's disease cognitive symptoms, the medial temporal lobes tend to show little to no increased signal on amyloid PET (Jack et al. , 2008). Despite its role in re-introducing these decades-old arguments against the primacy of plaques in Alzheimer's disease pathogenesis, amyloid PET imaging has, oddly, seemed to have the opposite effect on the field. Rather than focusing on the inconsistencies, studies have tended to highlight the overlap between regions that show early amyloid plaque signal on PET and that happen to be affected early in Alzheimer's disease (Buckner et al. , 2005; Sperling et al. , 2009; Koch et al. , 2014). The PCC and the IPC are most commonly cited in this regard. The PCC and IPC form the posterior aspect of the brain's DMN, a set of functionally connected regions-that also includes the medial prefrontal cortex and medial temporal lobe structures-that relates to memory function and appears to be targeted early by Alzheimer's disease pathology (Raichle et al. , 2001; Greicius et al. , 2003; Greicius et al. , 2004; Shirer et al. , 2012). One highly cited early study in this vein pointed out the qualitative similarity between a resting-state fMRI map of the DMN, a map of glucose hypometabolism in Alzheimer's disease patients, and a map of amyloid deposition in Alzheimer's disease patients (Buckner et al. , 2005). This led to the oversimplified interpretation that amyloid plaque deposition occurs in the DMN and results in the dysfunction of this network. No attention was given to the findings, evident from the images, that Alzheimer's disease patients typically have normal metabolism in the medial prefrontal cortex despite having abundant amyloid deposition. Similarly, while the medial temporal lobe is a key component of the DMN and its metabolism is already reduced in the earliest clinical stages of Alzheimer's disease, the amyloid map in this study (as in most subsequent amyloid PET studies)\nshows no uptake in the hippocampus (Buckner et al. , 2005; Kemppainen et al. , 2006; Edison et al. , 2007; Jack et al. , 2008) , though with rare exceptions (Frisoni et al. , 2009; Sepulcre et al. , 2013). A few multimodal imaging studies using FDG PET and amyloid PET approached the question of whether local amyloid plaque deposition is correlated with local levels of glucose metabolism. These studies produced conflicting results with some showing an association between local amyloid plaque deposition and glucose hypometabolism in some brain regions (Engler et al. , 2006; Edison et al. , 2007; Cohen et al. , 2009; Lowe et al. , 2014) and others showing the absence of any correlation (Li et al. , 2008; Rabinovici et al. , 2010; Furst et al. , 2012). ", " Further work showed that the dependency may be more complex and relationship between plaques and metabolism may change depending on disease stages (Cohen et al, 2009) or brain regions (La Joie et al. , 2012). Discrepancies in the findings may originate from the different subject populations that were studied. For instance, Lowe et al. (2014) studied only healthy controls, while Furst et al. (2012) focused on AD subjects. A second source for the discrepancies may be the limited sample sizes of most studies: with the exception of Lowe et al. (2014) , previous studies comprised fewer than 100 subjects and the specific regional analysis within a single disease group did typically not exceed two dozen subjects (Engler et al. , 2006; Edison et al. , 2007; Li et al. , 2008; Cohen et al. 
, 2009; La Joie et al. , 2012). Moreover, many studies relied on a plain correlation analysis between the regional tracer intensities without correcting for cofounders such as age, sex, education and extent of amyloid pathology. Here we investigated the relationship between regional amyloid plaque deposition and regional glucose hypometabolism, using a large dataset comprising hundreds of subjects (healthy controls and patients with MCI) obtained from the ADNI (Alzheimer's disease neuroimaging initiative) database who were imaged with both amyloid PET ( 18 F-florbetapir PET) and FDG PET. "]
adni
</code>
## Create dataset
For each publication, the dataset returns all of its chunks together; the collate function then encodes and pads them into tensors.
Therefore, each pass of our bidirectional GRU encoder sees whole publications, with every chunk of a publication kept together in the batch._____no_output_____
<code>
from transformers import BertModel, BertTokenizerFast
bert_model = BertModel.from_pretrained('bert-base-uncased').to(device)
bert_model.eval()
tokenizer = BertTokenizerFast.from_pretrained('bert-base-uncased')
_____no_output_____from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Dataset, DataLoader
class ChunkedDataset(Dataset):
"""
@author: Alexander Rodriguez
"""
def __init__(self, publication_ids, chunked_text, chunk_labels, dataset_label, device, tokenizer, bert_model):
"""
Args:
chunked_text: list of str, contains all the chunks
chunk_labels: list of booleans, indicating whether or not the label is in each chunk
dataset_label: string, same label for all chunks in the publication
device: cpu or cuda
"""
self.publication_ids = publication_ids
self.chunked_text = chunked_text
self.chunk_labels = chunk_labels
self.dataset_label = dataset_label
self.tokenizer = tokenizer
self.device = device
self.bert_model = bert_model
def __len__(self):
return len(self.publication_ids)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
return {"publication_ids":self.publication_ids[idx], "chunked_text":self.chunked_text[idx],
"chunk_labels":self.chunk_labels[idx], "dataset_label":self.dataset_label[idx]}
def collate_fn(data):
"""Creates mini-batch tensors for several publications
Return: A dictionary for each chunk (read below)
Each training observation will represent one chunk, therefore we have:
input_ids: the word ids from the Bert tokenizer
tensor shape (max_input_sequence_length,batch_size)
input_tensor: the Bert word embeddings for the sequence (chunk)
tensor shape (max_input_sequence_length,batch_size,bert_dim)
attention_mask: useful for knowing where the sequence ends
Each chunk has two labels:
chunk_labels: (list of 0/1) whether or not the chunk contains the label
output_ids: the ids that have to be predicted for the target sequence
tensor shape (max_output_sequence_length,batch_size)
Sequences are padded to the maximum length of mini-batch sequences (dynamic padding).
"""
chunked_text = []; chunk_labels = []; dataset_label = []
for publication in data:
# for chunk in publication:
chunked_text += [chunk for chunk in publication["chunked_text"] ]
chunk_labels += [chunk for chunk in publication["chunk_labels"] ]
# our dataset_label have to be repeated
dataset_label += [publication["dataset_label"] for _ in publication["chunk_labels"] ]
with torch.no_grad(): # needed for memory
t = tokenizer(chunked_text, padding=True, truncation=True, return_tensors="pt").to(device)
outputs = bert_model(**t)
bert_input_word_embeddings = outputs[0].permute(1,0,2)
del outputs
torch.cuda.empty_cache()
input_ids = t['input_ids'].permute(1,0)
attention_mask = t['attention_mask']
def encode(tgt):
tgt_ids = vocab.get_ids_from_sentence(tgt)
return tgt_ids
# We will pre-tokenize the dataset labels (output) and save in id lists for later use
output_ids = [encode(tgt) for tgt in dataset_label]
output_ids = [torch.LongTensor(e) for e in output_ids]
output_ids = pad_sequence(output_ids,padding_value=pad_id).to(device)
# "chunked_text":chunked_text,
# "dataset_label":dataset_label,
return {"input_ids":input_ids, "chunk_labels":chunk_labels, \
"output_ids":output_ids, "input_tensor":bert_input_word_embeddings, \
'attention_mask':attention_mask}_____no_output_____# do not use, this is only for debugging
# data = pd.read_csv("data.csv")
# with torch.no_grad():
# t = tokenizer(data['text'].tolist()[0:16], padding=True, truncation=True, return_tensors="pt").to(device)
# outputs = bert_model(**t)
# encoded_layers = outputs[0]
# del outputs
# torch.cuda.empty_cache()
_____no_output_____
</code>
## Seq2seq model
Uses Bert word embeddings as the encoder input.
Makes two predictions for each chunk: whether the chunk contains a dataset mention (a binary classifier run on the first decoding step) and the dataset label sequence itself (the decoder)._____no_output_____
<code>
import torch.nn as nn
class Seq2seq(nn.Module):
def __init__(self, vocab, bert_dim = 300, emb_dim = 300, hidden_dim = 300, num_layers = 2, dropout=0.1):
super().__init__()
"""
@author: Alexander Rodriguez
bert_dim: dimension of Bert embeddings
emb_dim: dimension of our word embedding (used in decoder)
hidden_dim: dimension of our GRU hidden states
"""
self.bert_dim = bert_dim
self.num_words = vocab.num_words
self.emb_dim = emb_dim
self.hidden_dim = hidden_dim
self.num_layers = num_layers
# neural layers
self.embedding_layer = nn.Linear(1,self.emb_dim)
self.encoder = nn.GRU(
self.bert_dim,self.hidden_dim,self.num_layers,bidirectional=True,dropout=dropout
)
self.linear_hidden = nn.Linear(self.hidden_dim,self.hidden_dim)
self.decoder = nn.GRU(
self.emb_dim,self.hidden_dim,self.num_layers,bidirectional=False,dropout=dropout
)
self.output_layer = nn.Linear(self.hidden_dim,self.num_words)
self.classifier = nn.Linear(self.hidden_dim, 1)
self.attn_softmax = nn.Softmax(1)
def encode(self, input_embeddings, attention_mask):
"""Encode the source batch using a bidirectional GRU encoder.
Args:
input_embeddings: Bert embeddings with shape (max_input_sequence_length,
batch_size,bert_dim), e.g. torch.Size([512, 16, 768])
attention_mask: attention mask obtained from Bert tokenizer
Returns:
A tuple with three elements:
encoder_output: The output hidden representation of the encoder
with shape (max_input_sequence_length, batch_size, hidden_size).
Can be obtained by adding the hidden representations of both
directions of the encoder bidirectional GRU.
encoder_mask: A boolean tensor with shape (max_input_sequence_length,
batch_size) indicating which encoder outputs correspond to padding
tokens. Its elements should be True at positions corresponding to
padding tokens and False elsewhere.
encoder_hidden: The final hidden states of the bidirectional GRU
(after a suitable projection) that will be used to initialize
the decoder. This should be a tensor h_n with shape
(num_layers, batch_size, hidden_size). Note that the hidden
state returned by the bi-GRU cannot be used directly. Its
initial dimension is twice the required size because it
contains state from two directions.
"""
batch_size = input_embeddings.shape[1]
dtype = torch.float
# gru pass
encoder_output, encoder_hidden = self.encoder(input_embeddings) # seq_len first
# sum embeddings from the two GRUs
encoder_output = encoder_output[:,:,:self.hidden_dim] + encoder_output[:,:,self.hidden_dim:]
# hidden embedding
encoder_hidden = encoder_hidden.view(self.num_layers, 2, batch_size, self.hidden_dim)
encoder_hidden = encoder_hidden.sum(1) # sum over bi-directional, keep number of layers
encoder_hidden = self.linear_hidden(encoder_hidden)
encoder_mask = attention_mask.permute(1,0)
return encoder_output, encoder_mask, encoder_hidden
def decode(self, decoder_input, last_hidden, encoder_output, encoder_mask, use_classifier=False):
"""Run the decoder GRU for one decoding step from the last hidden state.
Args:
decoder_input: An integer tensor with shape (1, batch_size) containing
the subword indices for the current decoder input.
last_hidden: A pair of tensors h_{t-1} representing the last hidden
state of the decoder, each with shape (num_layers, batch_size,
hidden_size). For the first decoding step the last_hidden will be
encoder's final hidden representation.
encoder_output: The output of the encoder with shape
(max_src_sequence_length, batch_size, hidden_size).
encoder_mask: The output mask from the encoder with shape
(max_src_sequence_length, batch_size). Encoder outputs at positions
with a True value correspond to padding tokens and should be ignored.
use_classifier: (boolean) Whether or not we should classify
Returns:
A tuple with three elements:
logits: A tensor with shape (batch_size,
vocab_size) containing unnormalized scores for the next-word
predictions at each position.
decoder_hidden: tensor h_n with the same shape as last_hidden
representing the updated decoder state after processing the
decoder input.
attention_weights: This will be implemented later in the attention
model, but in order to maintain compatible type signatures, we also
include it here. This can be None or any other placeholder value.
"""
# shared layer
dtype = torch.float
input = decoder_input.type(dtype)
input = self.embedding_layer(input.permute(1,0).unsqueeze(2))
# attention weights
max_src_sequence_length = encoder_output.shape[0]
batch_size = encoder_output.shape[1]
decoder_output, decoder_hidden = self.decoder(input.permute(1,0,2),last_hidden)
# use the decoder output to get attention weights via dot-product
attention_weights = torch.empty((batch_size,max_src_sequence_length),device=device,dtype=dtype)
# function for batch dot product taken from https://discuss.pytorch.org/t/dot-product-batch-wise/9746/12
def bdot(a, b):
B = a.shape[0]
S = a.shape[1]
return torch.bmm(a.view(B, 1, S), b.view(B, S, 1)).reshape(-1)
for i in range(max_src_sequence_length):
attention_weights[:,i] = bdot(decoder_output.squeeze(0),encoder_output[i,:,:])
# softmax
attention_weights = self.attn_softmax(attention_weights)
# get context vector
context = torch.mul(encoder_output.permute(1,0,2), attention_weights.unsqueeze(2))
context = context.sum(1)
decoder_output = decoder_output.squeeze(0) + context
# gru pass
logits = self.output_layer(decoder_output)
# use the attention context as input to the classifier along with
# hidden states from encoder
if use_classifier:
out_classifier = self.classifier(last_hidden[0] + last_hidden[1] + context)
else:
out_classifier = torch.tensor(0.).to(device)
return logits, decoder_hidden, attention_weights, out_classifier
def compute_loss(self, input_tensor, attention_mask, target_seq, target_binary):
"""Run the model on the source and compute the loss on the target.
Args:
input_tensor & attention_mask:
Coming from Bert, directly go to encoder
See encoder documentation for details
target_seq: An integer tensor with shape (max_target_sequence_length,
batch_size) containing subword indices for the target sentences.
target_binary: Binary indicator for the chunk, indicates if
the label is in that chunk (it's a list)
NOTE: this is used as a mask for the sequence loss
Returns:
A scalar float tensor representing cross-entropy loss on the current batch
divided by the number of target tokens in the batch.
Many of the target tokens will be pad tokens. You should mask the loss
from these tokens using appropriate mask on the target tokens loss.
"""
# loss criterion, ignoring pad id tokens
criterion = nn.CrossEntropyLoss(ignore_index=pad_id,reduction='none')
criterion_classification = nn.BCEWithLogitsLoss(reduction='sum')
# call encoder
encoder_output, encoder_mask, encoder_hidden = self.encode(input_tensor, attention_mask)
# decoder
max_target_sequence_length = target_seq.shape[0]
last_hidden = encoder_hidden
total_loss = torch.tensor(0.).to(device)
target_binary = torch.tensor(target_binary,dtype=torch.float).to(device)
for i in range(max_target_sequence_length-1):
decoder_input = target_seq[[i],]
# do a forward pass over classifier only for the first
use_classifier = True if i==0 else False
logits, decoder_hidden, attention_weights, out_classifier = self.decode(decoder_input, last_hidden, encoder_output, encoder_mask, use_classifier)
# target_binary serves as a mask for the loss
# we only care about the predicted sequence when we should
total_loss += (criterion(logits,target_seq[i+1,]) * target_binary).sum()
# get classification loss only for the first one (which is where out_classifier is meaningful)
if use_classifier:
class_loss = criterion_classification(out_classifier.view(-1),target_binary)
# now we have to make last_hidden to be hidden embedding of gru
last_hidden = decoder_hidden
# denominator of loss
total_target_tokens = torch.sum(target_seq != pad_id).cpu()
return total_loss/total_target_tokens + class_loss
_____no_output_____import tqdm
def train(model, data_loader, num_epochs, model_file, learning_rate=0.0001):
"""Train the model for given number of epochs and save the trained model in
the final model_file.
"""
decoder_learning_ratio = 5.0
encoder_parameter_names = ['embedding_layer','encoder','linear_hidden']
encoder_named_params = list(filter(lambda kv: any(key in kv[0] for key in encoder_parameter_names), model.named_parameters()))
decoder_named_params = list(filter(lambda kv: not any(key in kv[0] for key in encoder_parameter_names), model.named_parameters()))
encoder_params = [e[1] for e in encoder_named_params]
decoder_params = [e[1] for e in decoder_named_params]
optimizer = torch.optim.AdamW([{'params': encoder_params},
{'params': decoder_params, 'lr': learning_rate * decoder_learning_ratio}], lr=learning_rate)
clip = 50.0
for epoch in tqdm.notebook.trange(num_epochs, desc="training", unit="epoch"):
# print(f"Total training instances = {len(train_dataset)}")
# print(f"train_data_loader = {len(train_data_loader)} {1180 > len(train_data_loader)/20}")
with tqdm.notebook.tqdm(
data_loader,
desc="epoch {}".format(epoch + 1),
unit="batch",
total=len(data_loader)) as batch_iterator:
model.train()
total_loss = 0.0
for i, batch_data in enumerate(batch_iterator, start=1):
input_tensor = batch_data["input_tensor"]
attention_mask = batch_data["attention_mask"]
output_ids = batch_data["output_ids"]
target_binary = batch_data["chunk_labels"]
optimizer.zero_grad()
loss = model.compute_loss(input_tensor, attention_mask, output_ids,target_binary)
total_loss += loss.item()
loss.backward()
# Gradient clipping before taking the step
_ = nn.utils.clip_grad_norm_(model.parameters(), clip)
optimizer.step()
batch_iterator.set_postfix(mean_loss=total_loss / i, current_loss=loss.item())
# Save the model after training
torch.save(model.state_dict(), model_file)_____no_output_____# Create the DataLoader for all publications
dataset = ChunkedDataset(publication_ids[0:2000], chunked_text[0:2000], chunk_labels[0:2000], dataset_label[0:2000], device, tokenizer, bert_model)
batch_size = 4 # this means it's 4 publications per batch ---too large may not fit in GPU memory
data_loader = DataLoader(dataset=dataset, batch_size=batch_size,
shuffle=True, collate_fn=collate_fn)_____no_output_____# You are welcome to adjust these parameters based on your model implementation.
num_epochs = 10
model = Seq2seq(vocab,bert_dim=768,emb_dim=256,hidden_dim=256,num_layers=2).to(device)
train(model, data_loader, num_epochs, "bert_word_seq2seq_model_2.pt")
# Download the trained model to local for future use
_____no_output_____x = next(iter(data_loader))
print(x["output_ids"])
tensor([[ 1, 1, 1, 1, 1, 1, 1, 1],
[ 32, 32, 180, 180, 304, 304, 3313, 3313],
[ 33, 33, 2, 2, 305, 305, 73, 73],
[ 42, 42, 0, 0, 2073, 2073, 2708, 2708],
[ 43, 43, 0, 0, 2074, 2074, 3314, 3314],
[ 44, 44, 0, 0, 2075, 2075, 31, 31],
[ 180, 180, 0, 0, 2, 2, 3315, 3315],
[ 2, 2, 0, 0, 0, 0, 489, 489],
[ 0, 0, 0, 0, 0, 0, 2, 2]], device='cuda:0')
</code>
## Evaluation
This comes from Alex Wang; I haven't checked it._____no_output_____Load model_____no_output_____
<code>
model = Seq2seq(vocab,bert_dim=768,emb_dim=256,hidden_dim=256,num_layers=2).to(device)
model.load_state_dict(torch.load("bert_word_seq2seq_model_2.pt"))_____no_output_____print(chunked_text[0])["Introduction: The heterogeneity of behavioral variant frontotemporal dementia (bvFTD) calls for multivariate imaging biomarkers. Methods: We studied a total of 148 dementia patients from the Feinstein Institute (Center-A: 25 bvFTD and 10 Alzheimer's disease), Technical University of Munich (Center-B: 44 bvFTD and 29 FTD language variants), and Alzheimer's Disease Neuroimaging Initiative (40 Alzheimer's disease subjects). To identify the covariance pattern of bvFTD (behavioral variant frontotemporal dementiarelated pattern [bFDRP]), we applied principal component analysis to combined 18F-fluorodeoxyglucose-positron emission tomography scans from bvFTD and healthy subjects. The phenotypic specificity and clinical correlates of bFDRP expression were assessed in independent testing sets. The bFDRP was identified in Center-A data (24. 1% of subject ! voxel variance; P ,. 001), reproduced in Center-B data (P ,. 001), and independently validated using combined testing data (receiver operating characteristics-area under the curve 5 0. 97; P ,. 0001). The expression of bFDRP was specifically elevated in bvFTD patients (P ,. 001) and was significantly higher at more advanced disease stages (P 5. 035:duration; P ,. 01:severity). Discussion: The bFDRP can be used as a quantitative imaging marker to gauge the underlying disease process and aid in the differential diagnosis of bvFTD. Behavioral variant frontotemporal dementia; Spatial covariance pattern; Differential diagnosis; Quantitative imaging biomarker; FDG PET Dr. Eidelberg serves on the scientific advisory board and has received honoraria from The Michael J. Fox Foundation for Parkinson's Research; is listed as coinventor of patents, re: Markers for use in screening patients for nervous system dysfunction and a method and apparatus for using same, without financial gain; and has received research support from the NIH (NINDS, NIDCD, and NIAID) and the Dana Foundation. All other authors have declared that no conflict of interest exists. 1 Some of the data used in preparation of this article were obtained from the Alzheimer's Disease Neuroimaging Initiative (ADNI) database (adni. lo ni. usc. edu). As such, the investigators within the ADNI contributed to the design and implementation of ADNI and/or provided data but did not participate in analysis or writing of this report. A complete listing of ADNI investigators can be found at Behavioral variant frontotemporal dementia (bvFTD) is the most common clinical phenotype of frontotemporal lobar degeneration (FTLD), a leading cause of dementia in midlife [1]. This syndrome is characterized by progressive impairment of personal and social behavior, as well as emotional, language, and executive functions [1]. However, similar symptoms are also seen in various other psychiatric and neurodegenerative disorders, particularly Alzheimer's disease (AD), making accurate diagnosis of bvFTD challenging [1] , especially at early stages of the disease [2]. Overall, the accuracy of clinical diagnosis of dementia has been improved with the study of 18 F-fluorodeoxyglucose (FDG) positron emission tomography (PET) brain scans [3] , as suggested by the diagnostic criteria for bvFTD [4] and AD [5]. 
", ' However, the considerable individual variability in neuroanatomical involvement seen in bvFTD patients [6] [7] [8] restricts the use of regional and univariate analytical approaches for early and accurate detection of this disorder [2, 7, 9] , calling for the identification and standardization of multivariate quantitative imaging biomarkers [10, 11] for this dementia syndrome [12] A multivariate brain mapping approach, based on principal component analysis (PCA), has been applied to FDG PET data for several neurodegenerative disorders to identify disease-related spatial covariance patterns [13] [14] [15]. The expression of such metabolic signatures [10, 13] can be quantified in the scan data of prospective individual subjects [14, 15] and thus has been used to aid in early differential diagnosis, predict disease progression, and track response to therapy [13]. Nonetheless, to date, a metabolic covariance pattern has not been determined for bvFTD. The main objective of this study was to identify and characterize the bvFTD metabolic covariance pattern (bvFTD-related pattern [bFDRP] ) and assess its performance as an imaging marker for bvFTD. Our basic hypothesis was that bFDRP can classify independent bvFTD patients from healthy controls. Specifically, we identified bFDRP in a North American sample, cross-validated its reproducibility in a pathologyconfirmed European sample, and assessed its clinical correlates and classification performance for early-stage dementia. ']
print(sent)National Education Longitudinal Study
def predict_greedy(model, sentence, max_length=100):
"""Make predictions for the given input using greedy inference.
Args:
model: A sequence-to-sequence model.
sentence: An input string.
max_length: The maximum length at which to truncate outputs in order to
avoid non-terminating inference.
Returns:
Model's predicted greedy response for the input, represented as string.
"""
# You should make only one call to model.encode() at the start of the function,
# and make only one call to model.decode() per inference step.
with torch.no_grad(): # needed for memory
t = tokenizer(sentence, padding=True, truncation=True, return_tensors="pt").to(device)
outputs = bert_model(**t)
bert_input_word_embeddings = outputs[0].permute(1,0,2)
del outputs
torch.cuda.empty_cache()
input_ids = t['input_ids'].permute(1,0)
attention_mask = t['attention_mask']
model.eval()
# a single encoder pass is enough; its outputs are reused at every decoding step
encoder_output, encoder_mask, encoder_hidden = model.encode(bert_input_word_embeddings, attention_mask)
last_hidden = encoder_hidden
start = bos_id
sent = [start]
i = 0
while start != eos_id and i < max_length:
use_classifier = True if i==0 else False
start = torch.unsqueeze(torch.tensor(start).cuda(), 0)
logits, decoder_hidden, attention_weights, out_classifier = model.decode(torch.unsqueeze(torch.tensor(start).cuda(), 0), last_hidden, encoder_output, encoder_mask, use_classifier)
start = torch.argmax(logits[0], 0)
last_hidden = decoder_hidden
sent.append(start.item())
i += 1
if use_classifier:
if out_classifier < -1:
return False
sent = vocab.decode_sentence_from_ids(sent)
return sent
#predictions = []
#for i in range(100):
# temp = []
# for j in range(len(chunked_text[i])):
# a = predict_greedy(model, chunked_text[i][j])
# temp.append(a)
# predictions.append(temp)
# print(dataset_label[i])
# print(temp)
score = 0
def jaccard(str1, str2):
a = set(str1.lower().split())
b = set(str2.lower().split())
c = a.intersection(b)
return float(len(c)) / (len(a) + len(b) - len(c))
_____no_output_____predictions[40]_____no_output__________no_output__________no_output_____def predict_beam(model, sentence, k=3, max_length=100, thresh=-9999):
"""Make predictions for the given inputs using beam search.
Args:
model: A sequence-to-sequence model.
sentence: An input sentence, represented as string.
k: The size of the beam.
max_length: The maximum length at which to truncate outputs in order to
avoid non-terminating inference.
Returns:
A list of k beam predictions. Each element in the list should be a string
corresponding to one of the top k predictions for the corresponding input,
sorted in descending order by its final score.
"""
# Implementation tip: once an eos_token has been generated for any beam,
# remove its subsequent predictions from that beam by adding a small negative
# number like -1e9 to the appropriate logits. This will ensure that the
# candidates are removed from the beam, as its probability will be very close
# to 0. Using this method, you will be able to reuse the beam of an already
# finished candidate
# Implementation tip: while you are encouraged to keep your tensor dimensions
# constant for simplicity (aside from the sequence length), some special care
# will need to be taken on the first iteration to ensure that your beam
# doesn't fill up with k identical copies of the same candidate.
# You are welcome to tweak alpha
alpha = 0.9
with torch.no_grad(): # needed for memory
t = tokenizer(sentence, padding=True, truncation=True, return_tensors="pt").to(device)
outputs = bert_model(**t)
bert_input_word_embeddings = outputs[0].permute(1,0,2)
del outputs
torch.cuda.empty_cache()
input_ids = t['input_ids'].permute(1,0)
attention_mask = t['attention_mask']
model.eval()
# a single encoder pass is enough; its outputs are reused at every decoding step
encoder_output, encoder_mask, encoder_hidden = model.encode(bert_input_word_embeddings, attention_mask)
last_hidden = encoder_hidden
start = bos_id
sent = [start]
i = 0
start = bos_id
beams = []
start = torch.unsqueeze(torch.tensor(start).cuda(), 0)
logits, decoder_hidden, attention_weights, out_classifier = model.decode(torch.unsqueeze(torch.tensor(start).cuda(), 0), last_hidden, encoder_output, encoder_mask, 1)
if out_classifier < -2:
return False
out = torch.log_softmax(logits[0], 0)
values, start = torch.topk(out, k, 0)
for i in range(len(values)):
# Each beam contains the log probs at its first index and the hidden states at its last index
beams.append([values[i], start[i].item(), decoder_hidden])
generation = []
i = 0
while i < k:
curr = []
for j in beams:
start = torch.unsqueeze(torch.tensor(j[-2]).cuda(), 0)
logits, decoder_hidden, attention_weights, out_classifier = model.decode(torch.unsqueeze(torch.tensor(start).cuda(), 0), j[-1], encoder_output, encoder_mask, 0)
out = torch.log_softmax(logits[0], 0)
values, start = torch.topk(out, k, 0)
for z in range(len(values)):
temp = j.copy()
temp[0] = values[z] + temp[0]
temp.insert(-1, start[z].item())
temp[-1] = decoder_hidden
curr.append(temp)
curr = sorted(curr,reverse=True, key=lambda x: x[0])
curr = curr[0:k - i]
beams = []
for j in curr:
if j[-2] == eos_id or len(j) > 20:
generation.append(j[:-1])
i +=1
else:
beams.append(j)
final = []
generation = sorted(generation, reverse=True, key=lambda x: x[0]/(len(x)-1)**alpha)
#for i in generation:
# if i[0].item() > thresh:
final.append(vocab.decode_sentence_from_ids(generation[0][1:]).lower())
return final
_____no_output_____predictions = []
for i in range(2000):
temp = []
for j in chunked_text[i]:
x = predict_beam(model, j)
if x:
temp.append(x[0])
predictions.append(temp)_____no_output_____print(len(predictions))2000
score = 0
for i in range(2000):
for j in predictions[i]:
found = False
if jaccard(df.loc[i][5], j) > 0.5:
score += 1
found = True
break
print("max accuracy")
print(score/2000)max accuracy
0.7275
print(df.loc[5][5])adni
testing = {}
for i in range(0, len(predictions)):
if publication_ids[i] not in testing.keys():
pred = predictions[i]
testing[publication_ids[i]] = (pred, [df.loc[i][5]])
else:
testing[publication_ids[i]][1].append(df.loc[i][5])_____no_output_____print(len(testing.keys()))1761
tp = 0
fp = 0
fn = 0
for i in testing.values():
prediction = set(i[0])
cop = prediction.copy()
true_pred = i[1].copy()
#check exact match first
for j in prediction:
if j in true_pred:
tp += 1
true_pred.remove(j)
cop.remove(j)
#then check rest for jaccard score
for j in cop:
found = False
removal = 0
for k in true_pred:
if jaccard(j, k) >= 0.5:
found = True
removal = k
break
if found:
tp += 1
true_pred.remove(removal)
else:
fp += 1
fn += len(true_pred)_____no_output_____
</code>
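Both evaluation loops above count a predicted label as correct when its Jaccard similarity with a gold label clears a 0.5 threshold. A minimal sketch of a token-set Jaccard, assuming the notebook's `jaccard` helper (defined in an earlier section) follows the usual definition:_____no_output_____
<code>
def jaccard_sketch(a, b):
    """Token-set Jaccard similarity between two label strings (hypothetical helper)."""
    tokens_a, tokens_b = set(a.lower().split()), set(b.lower().split())
    if not tokens_a or not tokens_b:
        return 0.0
    return len(tokens_a & tokens_b) / len(tokens_a | tokens_b)

# Example: "adni" vs "alzheimer s disease neuroimaging initiative adni" shares
# 1 of 6 tokens, giving ~0.17, so that pair does not count as a match; this
# explains many of the misses visible in the per-row comparison later on._____no_output_____
</code>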
TRAINING PERFORMANCE_____no_output_____
<code>
print("training performance")
print("micro F score")
print(fp)
print(fn)
print(tp/(tp + 1/2*(fp+fn)))
print("accuracy")
print(tp/(tp+fn))training performance
micro F score
383
567
0.7510482180293501
accuracy
0.7165
print(len(df))3284
predictions = []
for i in range(2000, 3000):
temp = []
for j in chunked_text[i]:
x = predict_beam(model, j)
if x:
temp.append(x[0])
predictions.append(temp)_____no_output_____print(predictions)[['adni'], ['adni'], ['adni', 'adni'], ['trends in international mathematics and science study'], ['adni'], [], ['adni'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['baltimore longitudinal study of aging'], ['coastal change analysis program'], ['census change agriculture', 'agricultural resource management survey'], ['adni', 'adni'], ['genome sequence of sars cov', 'covid open study of'], ['early childhood longitudinal study'], ['adni'], [], ['adni'], ['adni'], ['agricultural resource management survey', 'census of agriculture'], ['adni s disease neuroimaging initiative adni'], ['early childhood longitudinal study'], ['adni s disease neuroimaging initiative adni'], ['baltimore longitudinal study of aging', 'early childhood longitudinal study'], ['national education longitudinal'], ['adni'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['covid open research dataset'], ['early childhood longitudinal study'], ['adni', 'adni'], ['adni'], ['adni'], ['agricultural resource management survey'], ['national education longitudinal study', 'national education longitudinal'], ['coastal change analysis program'], [], ['early childhood longitudinal study'], ['adni'], ['baccalaureate and beyond study and'], ['adni', 'adni'], ['adni'], ['covid open study mathematics'], ['baltimore of study of aging blsa'], ['adni', 'adni'], ['adni', 'adni'], ['trends in international mathematics and science study'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['genome'], ['adni'], [], ['census of agriculture', 'census of agriculture'], [], ['adni'], ['adni', 'adni'], [], [], ['adni s disease neuroimaging initiative adni'], [], ['adni'], ['adni', 'adni'], [], ['adni'], ['adni'], ['adni'], ['trends in international mathematics and science study'], ['adni', 'adni', 'adni'], [], ['adni', 'adni'], ['adni', 'adni'], ['adni'], ['north american breeding bird survey'], ['adni'], ['adni', 'adni'], ['adni'], ['adni', 'adni'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni'], ['adni'], ['baltimore longitudinal study of aging'], ['adni s disease neuroimaging initiative adni', 'adni'], ['trends in international mathematics and science study'], ['adni'], ['adni'], ['adni', 'adni'], ['adni', 'adni s disease neuroimaging initiative adni'], ['adni'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['agricultural resource management survey'], ['trends for international mathematics'], ['adni'], ['adni'], ['baltimore'], ['early childhood longitudinal study'], ['trends in international mathematics and science study'], ['national education longitudinal study'], ['trends in international mathematics and science study'], ['adni'], ['adni', 'adni'], ['genome sequence of sars'], ['adni'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['north american breeding bird survey', 'north resource breeding bird survey'], ['adni', 'adni'], ['adni', 'adni'], ['adni'], ['census of agriculture'], ['adni', 'adni'], ['early childhood longitudinal study'], ['trends in international mathematics and science study'], ['early childhood longitudinal study'], ['adni'], ['adni'], ['agricultural resource management survey'], ['national education longitudinal study'], ['adni', 'adni'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['covid open research dataset'], [], ['baccalaureate and beyond study 
and'], ['early childhood longitudinal study'], ['adni'], ['adni', 'adni'], ['adni'], ['adni'], [], ['adni'], [], ['adni'], ['trends in international mathematics and science adult competencies'], [], ['adni'], ['adni', 'adni'], ['baltimore longitudinal study of aging blsa'], ['north american breeding bird survey'], ['north american breeding bird survey', 'north american breeding bird survey'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['early childhood longitudinal study'], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], ['genome sequence of sars cov'], ['national childhood longitudinal'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa'], [], ['national longitudinal longitudinal study'], ['adni'], ['early childhood longitudinal study'], ['adni s disease neuroimaging initiative adni'], ['adni', 'adni'], ['baltimore longitudinal study of aging blsa'], ['trends in international mathematics and science study'], ['adni'], ['trends in international mathematics and science study'], ['adni'], ['education education longitudinal study'], ['adni', 'adni'], ['adni', 'adni'], ['trends and international mathematics and science study'], ['trends in international mathematics and science study'], ['adni'], ['adni', 'early childhood study of aging adni'], ['adni'], ['north american breeding bird survey'], ['adni'], ['adni'], [], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], [], ['adni s disease neuroimaging initiative adni'], ['coastal of of'], ['census of agriculture'], ['baccalaureate and beyond study and'], [], ['adni'], ['adni'], ['early childhood longitudinal study'], ['beginning postsecondary students study and'], ['adni'], ['adni'], [], ['baccalaureate and beyond'], ['program for the of assessment'], ['adni'], ['north american breeding bird survey'], ['survey of doctorate recipients'], ['trends in international mathematics and science study', 'trends in international mathematics and science study', 'trends in international mathematics and science study'], ['national education longitudinal study'], ['trends in international mathematics and science'], ['coastal change analysis program'], ['adni', 'adni'], [], ['national education longitudinal study'], ['trends in international mathematics and science study'], ['adni'], ['adni', 'adni'], ['adni'], ['adni', 'adni'], ['adni longitudinal study neuroimaging aging adni'], ['adni s disease neuroimaging initiative adni', 'adni s disease neuroimaging initiative adni'], ['early childhood longitudinal study'], ['baltimore longitudinal study of aging blsa'], ['early childhood longitudinal study', 'early childhood longitudinal study'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['survey of doctorate recipients'], ['beginning postsecondary students'], ['trends in international mathematics and science study'], ['adni'], ['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa'], ['genome'], ['baltimore longitudinal study of aging blsa'], ['beginning postsecondary students study and'], ['adni'], ['national education longitudinal study'], ['adni'], ['adni', 'adni'], ['adni'], ['beginning postsecondary students study and'], ['adni', 'adni'], ['adni'], ['adni s disease neuroimaging initiative adni'], ['ibtracs'], ['alzheimer in disease neuroimaging initiative adni', 'adni'], ['agricultural resource management survey'], ['adni', 'adni'], [], ['adni'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni'], ['early 
childhood longitudinal study', 'early childhood longitudinal study'], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], ['adni'], ['trends of international mathematics'], ['adni', 'coastal'], ['trends in international mathematics and science study'], ['adni'], ['national'], ['adni'], ['trends in international mathematics and science study'], ['covid open research dataset'], ['adni'], ['trends in international mathematics and science study'], ['genome world of sars'], ['adni s disease neuroimaging initiative adni'], ['adni', 'adni s disease neuroimaging initiative adni'], ['adni'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['adni'], [], ['survey of doctorate recipients'], ['survey of doctorate recipients'], ['baltimore', 'adni'], ['agricultural resource management survey'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging'], ['trends in international mathematics and science study'], ['adni'], ['adni s disease neuroimaging initiative adni'], ['baltimore longitudinal study of aging'], ['survey and on survey and science', 'national in international'], ['adni s disease neuroimaging initiative adni'], ['genome sequence of sars'], ['trends in international mathematics and science study'], ['survey of doctorate recipients'], ['adni', 'adni'], ['baltimore longitudinal study of aging'], ['adni'], ['adni', 'adni'], ['adni', 'adni'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging'], ['early childhood longitudinal study'], ['trends in international mathematics and science study'], ['adni'], ['adni'], ['trends for international mathematics assessment science adult competencies'], ['adni'], ['adni'], ['covid open research dataset'], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], ['adni'], ['survey of doctorate recipients'], ['adni'], ['baltimore'], ['survey of doctorate recipients', 'survey of doctorate recipients'], ['early childhood longitudinal study'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni'], ['adni'], ['adni', 'adni', 'adni of disease of initiative adni'], ['agricultural resource management survey'], ['adni'], ['beginning postsecondary students', 'survey of doctorate recipients', 'baccalaureate childhood students', 'beginning postsecondary students study'], ['early childhood longitudinal study'], ['trends in international mathematics and science study', 'early childhood longitudinal study'], ['early childhood longitudinal study'], ['early childhood longitudinal study'], ['early childhood longitudinal study'], ['agricultural resource management survey'], ['adni', 'adni', 'adni'], ['survey of doctorate recipients'], ['adni', 'adni', 'adni'], ['baltimore longitudinal study of aging blsa'], ['adni longitudinal disease neuroimaging initiative adni'], ['survey of earned doctorates'], ['adni'], ['survey of doctorate recipients'], [], [], ['adni', 'adni'], ['adni', 'adni'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni'], ['early childhood longitudinal study'], ['trends in international mathematics and science study', 'trends in international mathematics and science study'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['adni'], ['adni'], ['adni'], ['adni'], ['baccalaureate and beyond study and'], ['trends in international mathematics and science study'], ['adni'], ['census of agriculture'], ['our'], ['baltimore 
longitudinal study of aging blsa'], [], ['genome sequence of sars cov'], ['adni'], ['adni'], ['adni'], ['adni'], ['adni', 'adni'], ['baltimore longitudinal study of initiative', 'adni'], ['adni', 'adni'], [], ['national education students'], ['adni'], [], ['covid open research dataset'], ['agricultural resource management survey'], ['early childhood longitudinal study'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa'], ['census of agriculture'], ['baltimore longitudinal study of aging blsa'], ['world ocean database'], ['adni'], ['adni', 'adni'], ['national education longitudinal study'], ['adni', 'adni'], ['adni'], ['adni s disease neuroimaging initiative adni'], ['baltimore longitudinal study of aging blsa'], ['adni', 'adni'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], ['adni'], ['adni'], [], [], [], ['adni'], [], ['adni s disease neuroimaging initiative adni'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['national education students study'], [], ['north american breeding bird survey'], ['national education longitudinal study'], ['baltimore'], ['early childhood longitudinal study'], ['adni'], ['adni'], ['adni', 'adni'], ['adni', 'adni s disease neuroimaging initiative adni'], [], ['adni', 'adni'], ['baltimore longitudinal disease neuroimaging initiative adni'], ['baltimore longitudinal study of aging blsa'], [], ['adni'], ['agricultural resource management survey', 'agricultural resource management survey', 'agricultural resource management survey'], [], [], ['baltimore longitudinal study of aging'], ['genome sequence of sars cov adni'], ['trends in international mathematics assessment science study competencies', 'program for international mathematics assessment science adult competencies'], ['national education longitudinal', 'beginning childhood longitudinal'], ['baccalaureate postsecondary beyond study and'], ['coastal change analysis survey'], ['baltimore longitudinal study of aging'], ['trends in international mathematics and science study', 'trends in international mathematics and science study'], ['adni', 'adni'], ['baltimore longitudinal study study aging'], ['trends in international mathematics and science study'], ['genome open of sars'], ['trends in international mathematics and science study'], ['genome sequence of sars cov'], ['adni'], ['trends in international mathematics and science study', 'trends in international mathematics and science study'], ['adni'], ['adni s disease neuroimaging initiative adni'], ['adni'], ['adni'], ['baltimore longitudinal disease neuroimaging initiative adni'], ['national education longitudinal'], ['early childhood longitudinal study'], ['baltimore longitudinal study of aging blsa'], [], ['baltimore longitudinal study of aging'], ['adni'], ['trends in international mathematics and science study'], ['adni'], ['north american breeding bird survey'], ['trends in international mathematics and science study'], ['adni'], ['adni'], ['national of doctorate recipients'], ['early childhood longitudinal study'], ['adni s disease neuroimaging initiative adni'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['adni', 'adni'], ['adni'], [], ['genome sequence of sars'], ['adni'], [], [], ['adni'], ['adni of of', 'early childhood study study'], ['adni'], ['census of agriculture', 'agricultural resource management survey'], ['north american breeding bird survey'], [], ['adni'], ['adni', 'adni'], ['trends in 
international mathematics and science study'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni'], ['adni', 'ibtracs'], ['ibtracs'], ['adni', 'adni'], [], ['baltimore longitudinal study of aging blsa'], ['census of agriculture', 'agricultural resource management survey'], ['adni', 'adni'], ['adni'], ['adni'], ['baltimore longitudinal study bird'], ['survey of doctorate recipients', 'survey of doctorate recipients'], ['adni'], ['adni'], [], ['national education study'], ['adni'], [], ['adni'], ['adni'], ['beginning postsecondary students study and'], ['north american breeding bird survey'], ['adni', 'adni'], ['adni'], [], ['our open international sars'], ['adni s disease neuroimaging initiative adni'], ['adni', 'adni'], ['adni'], ['adni'], ['baltimore'], ['adni', 'adni'], ['adni'], ['genome cov of sars'], ['adni', 'adni'], ['adni'], ['adni'], ['adni', 'adni', 'adni'], ['beginning postsecondary students study'], ['baltimore longitudinal study of aging'], ['adni', 'adni', 'adni', 'adni', 'adni', 'adni'], ['adni'], ['adni'], ['adni'], ['adni'], ['slosh for international dataset'], ['adni'], ['trends in international mathematics and science study'], ['adni'], ['census of agriculture of'], ['adni', 'adni'], ['slosh', 'coastal', 'slosh model international program', 'coastal model analysis program'], ['ibtracs for track archive', 'ibtracs change analysis archive'], [], ['adni'], ['adni'], ['agricultural resource management survey'], ['adni'], ['adni'], ['trends in international mathematics and science study', 'trends in international mathematics and science study'], ['adni'], ['survey and on survey and science'], ['adni'], ['adni'], ['adni', 'adni'], ['adni'], ['early childhood longitudinal study'], ['trends in international mathematics and science study'], ['baltimore longitudinal study of aging blsa'], ['agricultural resource management survey'], ['adni'], ['trends in international mathematics and science study'], ['baltimore longitudinal study of aging blsa'], ['baltimore longitudinal study of aging'], ['early childhood longitudinal study'], ['early childhood longitudinal study'], ['national education longitudinal study'], ['trends in international mathematics and science study'], ['adni'], ['early childhood longitudinal study'], ['adni'], ['adni'], ['adni'], ['baccalaureate and beyond study and', 'baccalaureate and beyond study and'], ['adni', 'adni'], ['adni'], ['early childhood longitudinal study', 'early childhood longitudinal study', 'early childhood longitudinal study'], ['baccalaureate and beyond study and'], ['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa'], ['education education longitudinal study'], ['genome sequence genome sars'], ['baltimore longitudinal study of aging blsa'], ['survey of doctorate recipients', 'survey of doctorate survey and science study'], ['baltimore longitudinal study of aging blsa'], ['trends in international mathematics and science study'], ['adni'], ['trends in international mathematics and science study'], [], ['survey of doctorate recipients'], [], ['trends in international mathematics and science study'], ['adni', 'adni'], ['beginning postsecondary students study'], ['baltimore longitudinal study of aging blsa'], ['national education longitudinal study'], [], [], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['baltimore longitudinal study of aging'], ['adni'], ['adni'], ['adni', 'adni'], ['early childhood longitudinal study'], ['adni'], ['national education longitudinal 
study'], ['adni'], ['baltimore', 'adni'], ['adni'], ['adni', 'adni'], ['early childhood longitudinal study'], [], ['trends in international mathematics and science study', 'trends in international mathematics and science study'], ['adni'], ['adni'], ['adni'], ['adni', 'adni'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['trends in international mathematics and science study'], ['beginning postsecondary students study'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['beginning postsecondary students study and'], ['early childhood longitudinal study'], ['adni'], ['adni s disease neuroimaging initiative adni'], ['adni', 'adni'], ['adni'], ['national education longitudinal'], ['adni'], ['trends in international mathematics and science study'], ['adni'], ['trends in international mathematics and science study'], ['trends in international mathematics and science study'], ['adni'], ['national education longitudinal study'], ['adni', 'adni'], ['adni'], ['adni'], [], ['adni'], [], ['adni', 'adni'], ['early childhood longitudinal study', 'early childhood longitudinal study'], ['adni'], ['adni', 'adni'], ['adni'], ['adni', 'adni'], [], ['agricultural resource management survey'], ['adni'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni', 'adni'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['adni'], ['adni', 'adni'], ['trends in international mathematics and science study'], [], ['baltimore longitudinal study of aging'], ['adni'], ['adni'], ['adni'], ['adni'], ['trends in international mathematics and science study'], ['baltimore longitudinal study of aging blsa'], ['agricultural resource management survey'], ['adni', 'adni'], [], ['adni'], ['genome sequence of sars cov'], ['education education longitudinal study', 'national education longitudinal'], ['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa'], ['adni s disease neuroimaging initiative adni', 'adni'], ['adni', 'adni'], [], ['adni'], [], ['adni', 'adni'], ['adni'], ['adni', 'adni s disease neuroimaging initiative adni'], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], ['adni'], ['adni'], ['survey of doctorate recipients'], ['trends in international mathematics and science study'], ['agricultural resource management survey'], ['adni s disease neuroimaging initiative adni'], ['north american breeding bird survey'], ['adni'], ['genome sequence of sars'], ['baltimore longitudinal study of aging'], ['adni', 'adni'], ['adni'], [], ['baltimore longitudinal study of aging blsa'], ['adni'], [], ['trends in international mathematics and science study'], ['adni s disease neuroimaging initiative adni'], ['early childhood longitudinal study'], ['adni'], ['early childhood longitudinal study'], ['genome sequence of sars cov', 'adni'], ['national education longitudinal study'], ['adni', 'adni'], [], [], ['genome sequence of sars'], ['adni', 'adni'], ['national education international mathematics assessment'], ['covid open research dataset'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['agricultural resource management survey'], ['adni'], ['adni'], ['adni', 'adni'], ['national education study study', 'trends in international mathematics and science study'], ['baltimore'], ['adni'], ['world ocean database'], ['adni', 'adni'], ['trends in international mathematics and science study'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['adni'], ['baltimore 
longitudinal study of aging'], ['early childhood longitudinal study'], ['adni'], ['adni', 'adni'], ['baltimore longitudinal study of aging blsa'], ['national education longitudinal study'], ['beginning postsecondary students study'], ['adni', 'adni', 'adni'], [], ['adni'], ['north american breeding bird survey'], ['adni'], ['baltimore longitudinal study of aging blsa'], ['census of agriculture', 'census resource management survey'], ['adni'], ['adni'], ['adni'], ['early childhood longitudinal study', 'early childhood longitudinal study'], ['adni'], ['adni'], ['trends in international mathematics and science study'], ['north american breeding bird survey', 'north american breeding bird'], ['adni'], ['adni'], ['adni', 'adni'], ['adni'], ['covid open research dataset'], ['adni'], ['adni'], ['our open study sars'], ['north american breeding bird survey'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['early childhood study study'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['trends in international mathematics and science study', 'early longitudinal study study'], ['baltimore longitudinal study of aging blsa'], ['trends in international mathematics'], ['adni'], ['adni'], ['education education longitudinal'], ['genome sequence genome sars', 'genome sequence genome sars'], ['adni'], ['adni'], ['adni'], ['baltimore longitudinal study of aging'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['genome sequence of sars'], ['adni'], ['adni'], ['adni'], ['agricultural resource management survey'], ['adni'], ['adni'], [], ['beginning postsecondary students study and'], ['adni', 'adni'], ['adni', 'adni'], ['adni', 'adni'], ['adni'], ['genome sequence of sars cov'], ['adni'], ['adni'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['adni'], ['adni', 'adni'], ['genome sequence genome sars cov'], ['adni'], ['national education longitudinal'], ['adni', 'adni'], ['early childhood longitudinal study'], ['genome world of sars'], ['adni'], ['baltimore longitudinal study of aging'], [], ['baltimore longitudinal study of aging'], ['adni', 'adni'], ['genome sequence genome sars'], ['baltimore longitudinal study of aging'], ['baltimore longitudinal study of aging'], ['adni'], ['early childhood longitudinal study'], ['adni'], ['early childhood longitudinal study'], ['baltimore longitudinal study of aging blsa'], ['adni'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['north american breeding bird survey'], ['adni', 'adni'], ['adni'], ['early childhood longitudinal study', 'early childhood longitudinal study'], ['genome sequence of sars', 'genome sequence research sars'], ['trends in international mathematics and science study'], ['world ocean database'], ['adni'], ['genome sequence of sars cov'], ['adni'], ['north american breeding bird survey', 'north resource breeding bird survey'], ['north american breeding bird survey'], ['trends in international mathematics and science study'], [], ['coastal change analysis program'], ['adni'], ['adni', 'adni'], ['adni', 'adni'], [], ['baltimore longitudinal study neuroimaging aging adni', 'baltimore longitudinal study of aging'], ['baltimore longitudinal study of aging'], ['adni', 'adni'], ['adni'], ['adni'], ['coastal change analysis program'], ['north american breeding bird survey'], ['adni'], ['adni'], ['adni'], ['genome sequence of sars cov'], ['adni', 'adni s disease neuroimaging initiative adni'], ['ibtracs'], ['adni s disease neuroimaging 
initiative adni'], ['adni'], ['agricultural resource management survey'], ['adni', 'baltimore longitudinal study of aging'], ['early childhood longitudinal study'], ['adni', 'adni'], ['adni s disease neuroimaging initiative adni'], [], ['early childhood longitudinal study'], ['adni', 'adni'], ['baltimore'], ['agricultural resource management survey'], ['north american breeding bird survey'], [], ['adni'], ['adni'], ['adni'], ['national education longitudinal'], ['north american breeding bird survey', 'north american breeding bird survey'], [], ['adni'], ['covid open research dataset'], ['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa'], ['adni'], ['covid open international dataset'], ['adni', 'adni'], [], ['north american breeding bird survey'], ['adni'], ['trends in international mathematics and science study'], ['baltimore longitudinal study of aging'], ['adni'], ['trends in international mathematics and science study'], [], ['agricultural resource management survey', 'agricultural resource management survey'], ['national education longitudinal study'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['adni'], ['coastal change analysis program'], ['adni'], [], ['adni'], ['adni'], ['adni'], ['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa'], ['trends in international mathematics and science study'], ['covid open research'], [], ['agricultural resource management survey'], ['national education longitudinal study', 'national education students'], ['baccalaureate and beyond study and'], ['trends in international mathematics and science study'], ['north american breeding bird survey'], ['adni s disease neuroimaging initiative adni'], ['adni', 'adni'], [], ['adni'], ['adni'], ['adni', 'adni'], ['adni'], ['beginning postsecondary students study and'], ['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa'], ['agricultural resource management survey'], ['adni'], [], ['trends in international mathematics and science study'], [], [], ['covid open research dataset'], [], ['adni'], ['adni'], ['adni'], ['adni'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['adni'], ['adni'], ['census of agriculture'], ['adni'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['trends in international mathematics and science study'], ['adni'], ['beginning postsecondary students study'], ['adni'], ['adni', 'adni'], ['adni'], [], ['baltimore longitudinal study of aging', 'baltimore longitudinal study study aging blsa study'], ['adni in international of initiative adni', 'adni'], ['adni', 'adni'], ['coastal change management program', 'agricultural resource management survey survey'], ['adni', 'adni'], ['adni'], ['adni'], ['world ocean database bird'], ['education education longitudinal study'], ['adni'], ['early childhood longitudinal study'], ['survey of doctorate recipients'], ['early childhood longitudinal study'], ['genome'], ['ibtracs model international', 'ibtracs'], ['census of agriculture'], ['baltimore longitudinal study of aging blsa'], [], ['baltimore longitudinal study of aging blsa'], ['trends in international mathematics and science study'], ['adni'], ['beginning postsecondary students study and'], ['adni', 'adni'], ['adni'], ['adni'], ['adni'], ['adni'], ['genome sequence of sars'], [], ['adni'], ['trends in international mathematics and science study'], ['survey of agriculture', 'census 
resource agriculture', 'trends in international mathematics', 'census of agriculture'], ['adni'], ['north american breeding bird survey'], ['adni'], ['covid open research dataset'], ['national education longitudinal study'], ['adni'], ['agricultural resource management survey survey'], ['adni'], ['agricultural resource management survey'], ['adni'], ['adni s disease neuroimaging initiative adni'], ['adni', 'adni'], ['adni'], ['trends in international mathematics and science study'], ['early childhood longitudinal study'], ['adni', 'adni s disease neuroimaging initiative adni'], ['baltimore longitudinal study of aging'], ['baltimore longitudinal study of aging blsa'], ['genome sequence of sars cov'], ['adni'], ['adni'], ['north american breeding bird survey'], ['adni model research dataset'], ['adni'], ['adni'], ['trends in international mathematics and science study'], ['coastal change analysis'], ['national education longitudinal'], ['baltimore longitudinal study of aging blsa'], ['north american breeding bird survey'], ['ibtracs', 'adni', 'adni'], ['trends in international mathematics and science study'], ['adni', 'adni'], ['adni', 'adni s disease neuroimaging initiative adni', 'adni'], ['adni'], ['adni'], ['national of doctorate recipients aging']]
</code>
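For reference, the "micro F score" printed above is the standard pooled F1 over true positives, false positives and false negatives. A minimal sketch (the helper name is hypothetical; only the tp/fp/fn counters from the evaluation loop are assumed):_____no_output_____
<code>
def micro_f1(tp, fp, fn):
    """Micro-averaged F1 from pooled counts: tp / (tp + 0.5 * (fp + fn))."""
    return tp / (tp + 0.5 * (fp + fn))

# With fp = 383 and fn = 567 as printed above, the reported 0.7510 is
# consistent with tp = 1433, which also reproduces the printed accuracy
# 1433 / 2000 = 0.7165._____no_output_____
</code>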
Checking Classifier Accuracy_____no_output_____
<code>
len(chunked_text)_____no_output_____count = 0
for i in predictions:
if not i:
count += 1
print(count)82
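# Note (illustration): the 82 empty prediction lists are the rows where no chunk
# produced a prediction, typically because every chunk was rejected by the
# classifier gate in predict_beam (out_classifier < -2), so this count is in
# effect a check on that gate's behaviour on the held-out rows.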
testing = {}
for i in range(0, len(predictions)):
if publication_ids[2000+i] not in testing.keys():
pred = predictions[i]
print(pred)
print(df.loc[2000+i][5])
testing[publication_ids[2000+i]] = (pred, [df.loc[2000+i][5]])
else:
testing[publication_ids[2000+i]][1].append(df.loc[2000+i][5])['adni']
adni
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
[]
noaa c cap
['adni']
adni
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['coastal change analysis program']
slosh model
['census change agriculture', 'agricultural resource management survey']
census of agriculture
['adni', 'adni']
adni
['genome sequence of sars cov', 'covid open study of']
genome sequence of sars cov 2
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
[]
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['agricultural resource management survey', 'census of agriculture']
census of agriculture
['adni s disease neuroimaging initiative adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging', 'early childhood longitudinal study']
early childhood longitudinal study
['national education longitudinal']
national education longitudinal study
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['covid open research dataset']
covid 19 open research dataset cord 19
['early childhood longitudinal study']
early childhood longitudinal study
['adni', 'adni']
adni
['adni']
adni
['adni']
adni
['agricultural resource management survey']
agricultural resource management survey
['national education longitudinal study', 'national education longitudinal']
education longitudinal study
['coastal change analysis program']
noaa c cap
[]
world ocean database
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['baccalaureate and beyond study and']
baccalaureate and beyond longitudinal study
['adni', 'adni']
adni
['adni']
adni
['covid open study mathematics']
education longitudinal study
['baltimore of study of aging blsa']
baltimore longitudinal study of aging blsa
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni']
adni
['genome']
genome sequence of sars cov 2
['adni']
adni
[]
national assessment of education progress
['census of agriculture', 'census of agriculture']
census of agriculture
[]
national assessment of education progress
['adni']
adni
[]
rural urban continuum codes
[]
national teacher and principal survey
['adni s disease neuroimaging initiative adni']
adni
[]
slosh model
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
[]
national teacher and principal survey
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni', 'adni']
alzheimer s disease neuroimaging initiative adni
[]
world ocean database
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['north american breeding bird survey']
north american breeding bird survey bbs
['adni']
survey of industrial research and development
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
baltimore longitudinal study of aging
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni s disease neuroimaging initiative adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni s disease neuroimaging initiative adni']
adni
['adni']
adni
['adni', 'adni']
adni
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['agricultural resource management survey']
agricultural resource management survey
['trends for international mathematics']
beginning postsecondary student
['adni']
adni
['adni']
adni
['baltimore']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['national education longitudinal study']
beginning postsecondary students longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['genome sequence of sars']
genome sequence of sars cov 2
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['north american breeding bird survey', 'north resource breeding bird survey']
north american breeding bird survey bbs
['adni', 'adni']
adni
['adni', 'adni']
adni
['adni']
adni
['census of agriculture']
census of agriculture
['adni', 'adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['agricultural resource management survey']
agricultural resource management survey
['national education longitudinal study']
national education longitudinal study
['adni', 'adni']
adni
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['covid open research dataset']
covid 19 open research dataset
[]
coastal change analysis program
['baccalaureate and beyond study and']
baccalaureate and beyond
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
[]
alzheimer s disease neuroimaging initiative adni
['adni']
adni
[]
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science adult competencies']
trends in international mathematics and science study
[]
adni
['adni', 'adni']
adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['north american breeding bird survey']
north american breeding bird survey
['north american breeding bird survey', 'north american breeding bird survey']
north american breeding bird survey
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['genome sequence of sars cov']
sars cov 2 genome sequences
['national childhood longitudinal']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
[]
world ocean database
['national longitudinal longitudinal study']
early childhood longitudinal study
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['education education longitudinal study']
education longitudinal study
['adni', 'adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['trends and international mathematics and science study']
trends in international mathematics and science study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimers disease neuroimaging initiative
['adni', 'early childhood study of aging adni']
adni
['adni']
adni
['north american breeding bird survey']
north american breeding bird survey bbs
['adni']
adni
['adni']
adni
[]
beginning postsecondary students longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
[]
ibtracs
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['coastal of of']
slosh model
['census of agriculture']
census of agriculture
['baccalaureate and beyond study and']
baccalaureate and beyond longitudinal study
[]
common core of data
['adni']
adni
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['beginning postsecondary students study and']
beginning postsecondary students
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
baltimore longitudinal study of aging
[]
common core of data
['baccalaureate and beyond']
baccalaureate and beyond longitudinal study
['program for the of assessment']
program for the international assessment of adult competencies
['adni']
adni
['survey of doctorate recipients']
survey of earned doctorates
['trends in international mathematics and science study', 'trends in international mathematics and science study', 'trends in international mathematics and science study']
trends in international mathematics and science study
['national education longitudinal study']
education longitudinal study
['trends in international mathematics and science']
trends in international mathematics and science study
['coastal change analysis program']
national water level observation network
['adni', 'adni']
adni
[]
baltimore longitudinal study of aging blsa
['national education longitudinal study']
education longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni longitudinal study neuroimaging aging adni']
baltimore longitudinal study of aging
['adni s disease neuroimaging initiative adni', 'adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['early childhood longitudinal study']
early childhood longitudinal study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['early childhood longitudinal study', 'early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni']
alzheimer s disease neuroimaging initiative adni
['survey of doctorate recipients']
survey of earned doctorates
['beginning postsecondary students']
beginning postsecondary students longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['genome']
sars cov 2 genome sequence
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni']
adni
['national education longitudinal study']
national education longitudinal study
['adni']
adni
['adni', 'adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['beginning postsecondary students study and']
beginning postsecondary student
['adni', 'adni']
adni
['adni']
adni
['adni s disease neuroimaging initiative adni']
adni
['ibtracs']
international best track archive for climate stewardship
['alzheimer in disease neuroimaging initiative adni', 'adni']
adni
['agricultural resource management survey']
agricultural resource management survey
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
[]
ibtracs
['adni']
adni
['adni', 'adni']
adni
['early childhood longitudinal study', 'early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
adni
['trends of international mathematics']
census of agriculture
['adni', 'coastal']
slosh model
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['national']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['genome world of sars']
genome sequences of sars cov 2
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni s disease neuroimaging initiative adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni']
adni
[]
early childhood longitudinal study
['survey of doctorate recipients']
national science foundation survey of earned doctorates
['survey of doctorate recipients']
survey of earned doctorates
['baltimore', 'adni']
alzheimer s disease neuroimaging initiative adni
['agricultural resource management survey']
agricultural resource management survey
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni s disease neuroimaging initiative adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['survey and on survey and science', 'national in international']
school survey on crime and safety
['adni s disease neuroimaging initiative adni']
adni
['genome sequence of sars']
genome sequences of sars cov 2
['trends in international mathematics and science study']
trends in international mathematics and science study
['survey of doctorate recipients']
survey of doctorate recipients
['adni', 'adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['early childhood longitudinal study']
early childhood longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni']
adni
['trends for international mathematics assessment science adult competencies']
program for the international assessment of adult competencies
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['covid open research dataset']
covid 19 open research dataset
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
adni
['survey of doctorate recipients']
survey of doctorate recipients
['adni']
adni
['baltimore']
alzheimer s disease neuroimaging initiative adni
['survey of doctorate recipients', 'survey of doctorate recipients']
survey of doctorate recipients
['early childhood longitudinal study']
early childhood longitudinal study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni', 'adni', 'adni of disease of initiative adni']
adni
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
['beginning postsecondary students', 'survey of doctorate recipients', 'baccalaureate childhood students', 'beginning postsecondary students study']
beginning postsecondary students
['early childhood longitudinal study']
early childhood longitudinal study
['trends in international mathematics and science study', 'early childhood longitudinal study']
early childhood longitudinal study
['early childhood longitudinal study']
education longitudinal study
['early childhood longitudinal study']
early childhood longitudinal study
['early childhood longitudinal study']
early childhood longitudinal study
['agricultural resource management survey']
agricultural resource management survey
['adni', 'adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['survey of doctorate recipients']
survey of earned doctorates
['adni', 'adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni longitudinal disease neuroimaging initiative adni']
adni
['survey of earned doctorates']
survey of earned doctorates
['adni']
adni
['survey of doctorate recipients']
national science foundation survey of earned doctorates
[]
trends in international mathematics and science study
[]
national teacher and principal survey
['adni', 'adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni s disease neuroimaging initiative adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['trends in international mathematics and science study', 'trends in international mathematics and science study']
trends in international mathematics and science study
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni']
adni
['adni']
adni
['adni']
adni
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimers disease neuroimaging initiative
['census of agriculture']
census of agriculture
['our']
our world in data
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
[]
baltimore longitudinal study of aging
['genome sequence of sars cov']
covid 19 open research dataset
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of initiative', 'adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
[]
trends in international mathematics and science study
['national education students']
survey of earned doctorates
['adni']
adni
[]
trends in international mathematics and science study
['covid open research dataset']
covid 19 open research dataset
['agricultural resource management survey']
agricultural resource management survey
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['census of agriculture']
census of agriculture
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['world ocean database']
world ocean database
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
['national education longitudinal study']
education longitudinal study
['adni', 'adni']
adni
['adni']
adni
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
adni
['adni']
adni
[]
slosh model
[]
world ocean database
[]
early childhood longitudinal study
['adni']
adni
[]
genome sequence of sars cov 2
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
adni
['national education students study']
baccalaureate and beyond
[]
early childhood longitudinal study
['north american breeding bird survey']
north american breeding bird survey
['national education longitudinal study']
national education longitudinal study
['baltimore']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni', 'adni']
adni
['adni', 'adni s disease neuroimaging initiative adni']
adni
[]
rural urban continuum codes
['baltimore longitudinal disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
[]
trends in international mathematics and science study
['adni']
adni
['agricultural resource management survey', 'agricultural resource management survey', 'agricultural resource management survey']
agricultural resource management survey
[]
survey of doctorate recipients
[]
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['genome sequence of sars cov adni']
sars cov 2 genome sequences
['trends in international mathematics assessment science study competencies', 'program for international mathematics assessment science adult competencies']
program for the international assessment of adult competencies
['national education longitudinal', 'beginning childhood longitudinal']
high school longitudinal study
['baccalaureate postsecondary beyond study and']
baccalaureate and beyond longitudinal study
['coastal change analysis survey']
coastal change analysis program
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['trends in international mathematics and science study', 'trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
adni
['baltimore longitudinal study study aging']
baltimore longitudinal study of aging
['trends in international mathematics and science study']
trends in international mathematics and science study
['genome open of sars']
our world in data
['trends in international mathematics and science study']
trends in international mathematics and science study
['genome sequence of sars cov']
sars cov 2 genome sequence
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study', 'trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni s disease neuroimaging initiative adni']
adni
['adni']
adni
['adni']
adni
['baltimore longitudinal disease neuroimaging initiative adni']
adni
['national education longitudinal']
national education longitudinal study
['early childhood longitudinal study']
early childhood longitudinal study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
[]
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['north american breeding bird survey']
north american breeding bird survey
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['national of doctorate recipients']
survey of doctorate recipients
['early childhood longitudinal study']
early childhood longitudinal study
['adni s disease neuroimaging initiative adni']
adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni', 'adni']
adni
['adni']
adni
['genome sequence of sars']
genome sequence of sars cov 2
['adni']
alzheimer s disease neuroimaging initiative adni
[]
slosh model
[]
census of agriculture
['adni']
adni
['adni of of', 'early childhood study study']
early childhood longitudinal study
['adni']
adni
['census of agriculture', 'agricultural resource management survey']
agricultural resource management survey
['north american breeding bird survey']
north american breeding bird survey
[]
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'ibtracs']
ibtracs
['ibtracs']
international best track archive for climate stewardship
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['census of agriculture', 'agricultural resource management survey']
agricultural resource management survey
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['baltimore longitudinal study bird']
baltimore longitudinal study of aging
['survey of doctorate recipients', 'survey of doctorate recipients']
survey of earned doctorates
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
[]
genome sequence of sars cov 2
['national education study']
education longitudinal study
['adni']
alzheimers disease neuroimaging initiative
[]
our world in data
['adni']
baltimore longitudinal study of aging
['adni']
adni
['beginning postsecondary students study and']
beginning postsecondary students
['north american breeding bird survey']
north american breeding bird survey
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
[]
early childhood longitudinal study
['our open international sars']
genome sequence of sars cov 2
['adni s disease neuroimaging initiative adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore']
baltimore longitudinal study of aging blsa
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['genome cov of sars']
genome sequence of sars cov 2
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni', 'adni', 'adni']
adni
['beginning postsecondary students study']
beginning postsecondary students
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni', 'adni', 'adni', 'adni', 'adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['slosh for international dataset']
slosh model
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['census of agriculture of']
census of agriculture
['adni', 'adni']
adni
['slosh', 'coastal', 'slosh model international program', 'coastal model analysis program']
noaa tidal station
['ibtracs for track archive', 'ibtracs change analysis archive']
ibtracs
[]
baccalaureate and beyond
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
sars cov 2 genome sequence
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study', 'trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['survey and on survey and science']
school survey on crime and safety
['adni']
adni
['adni']
adni
['adni', 'adni']
adni
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['agricultural resource management survey']
agricultural resource management survey
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['early childhood longitudinal study']
early childhood longitudinal study
['early childhood longitudinal study']
early childhood longitudinal study
['national education longitudinal study']
national education longitudinal study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baccalaureate and beyond study and', 'baccalaureate and beyond study and']
baccalaureate and beyond longitudinal study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['early childhood longitudinal study', 'early childhood longitudinal study', 'early childhood longitudinal study']
education longitudinal study
['baccalaureate and beyond study and']
baccalaureate and beyond
['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['education education longitudinal study']
national education longitudinal study
['genome sequence genome sars']
genome sequences of sars cov 2
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['survey of doctorate recipients', 'survey of doctorate survey and science study']
survey of graduate students and postdoctorates in science and engineering
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
[]
genome sequences of sars cov 2
['survey of doctorate recipients']
survey of earned doctorates
[]
national teacher and principal survey
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
adni
['beginning postsecondary students study']
beginning postsecondary students
['national education longitudinal study']
beginning postsecondary student
[]
north american breeding bird survey
[]
north american breeding bird survey
['adni']
adni
['adni']
adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['national education longitudinal study']
education longitudinal study
['adni']
adni
['baltimore', 'adni']
adni
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['early childhood longitudinal study']
early childhood longitudinal study
[]
adni
['trends in international mathematics and science study', 'trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['trends in international mathematics and science study']
trends in international mathematics and science study
['beginning postsecondary students study']
beginning postsecondary students
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['beginning postsecondary students study and']
beginning postsecondary students
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni s disease neuroimaging initiative adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['national education longitudinal']
national education longitudinal study
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['national education longitudinal study']
national education longitudinal study
['adni', 'adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
[]
national teacher and principal survey
['adni']
adni
[]
genome sequence of sars cov 2
['adni', 'adni']
adni
['early childhood longitudinal study', 'early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['adni', 'adni']
adni
['adni']
adni
['adni', 'adni']
adni
[]
alzheimer s disease neuroimaging initiative adni
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
['adni s disease neuroimaging initiative adni', 'adni']
adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
[]
trends in international mathematics and science study
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['agricultural resource management survey']
agricultural resource management survey
['adni', 'adni']
adni
[]
sars cov 2 genome sequence
['adni']
adni
['genome sequence of sars cov']
covid 19 image data collection
['education education longitudinal study', 'national education longitudinal']
education longitudinal study
['adni', 'adni']
adni
[]
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
[]
baccalaureate and beyond
['adni']
adni
['adni', 'adni s disease neuroimaging initiative adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
adni
['adni']
adni
['survey of doctorate recipients']
survey of doctorate recipients
['trends in international mathematics and science study']
trends in international mathematics and science study
['agricultural resource management survey']
agricultural resource management survey
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['north american breeding bird survey']
north american breeding bird survey bbs
['adni']
alzheimer s disease neuroimaging initiative adni
['genome sequence of sars']
genome sequence of 2019 ncov
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
[]
trends in international mathematics and science study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
[]
ibtracs
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni s disease neuroimaging initiative adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['early childhood longitudinal study']
early childhood longitudinal study
['genome sequence of sars cov', 'adni']
genome sequence of sars cov 2
['national education longitudinal study']
education longitudinal study
['adni', 'adni']
adni
[]
survey of earned doctorates
[]
agricultural resource management survey
['genome sequence of sars']
sars cov 2 genome sequences
['adni', 'adni']
adni
['national education international mathematics assessment']
national assessment of education progress
['covid open research dataset']
covid 19 open research dataset
['adni']
adni
['adni']
adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni']
adni
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
['adni', 'adni']
adni
['national education study study', 'trends in international mathematics and science study']
education longitudinal study
['baltimore']
baltimore longitudinal study of aging blsa
['adni']
alzheimer s disease neuroimaging initiative adni
['world ocean database']
world ocean database
['adni', 'adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni']
adni
['adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['national education longitudinal study']
national education longitudinal study
['beginning postsecondary students study']
beginning postsecondary student
['adni', 'adni', 'adni']
alzheimer s disease neuroimaging initiative adni
[]
census of agriculture
['adni']
adni
['north american breeding bird survey']
north american breeding bird survey bbs
['adni']
adni
['census of agriculture', 'census resource management survey']
census of agriculture
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['early childhood longitudinal study', 'early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['north american breeding bird survey', 'north american breeding bird']
north american breeding bird survey
['adni']
adni
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['covid open research dataset']
covid 19 open research dataset cord 19
['adni']
adni
['adni']
adni
['our open study sars']
genome sequence of sars cov 2
['north american breeding bird survey']
north american breeding bird survey
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['early childhood study study']
early childhood longitudinal study
['adni', 'adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['trends in international mathematics and science study', 'early longitudinal study study']
trends in international mathematics and science study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['trends in international mathematics']
our world in data
['adni']
adni
['education education longitudinal']
national education longitudinal study
['genome sequence genome sars', 'genome sequence genome sars']
genome sequence of 2019 ncov
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['genome sequence of sars']
genome sequences of sars cov 2
['adni']
adni
['adni']
adni
['adni']
adni
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
[]
census of agriculture
['beginning postsecondary students study and']
beginning postsecondary students longitudinal study
['adni', 'adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['genome sequence of sars cov']
sars cov 2 genome sequences
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
['adni']
adni
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['genome sequence genome sars cov']
genome sequences of sars cov 2
['adni']
adni
['national education longitudinal']
education longitudinal study
['adni', 'adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['genome world of sars']
genome sequence of sars cov 2
['adni']
adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
[]
national assessment of education progress
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['genome sequence genome sars']
genome sequences of sars cov 2
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['adni']
adni
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
['adni']
adni
['adni']
adni
['adni']
national assessment of education progress
['adni', 'adni']
adni
['adni']
adni
['early childhood longitudinal study', 'early childhood longitudinal study']
early childhood longitudinal study
['genome sequence of sars', 'genome sequence research sars']
genome sequence of sars cov 2
['trends in international mathematics and science study']
trends in international mathematics and science study
['world ocean database']
world ocean database
['adni']
alzheimer s disease neuroimaging initiative adni
['genome sequence of sars cov']
sars cov 2 genome sequence
['adni']
adni
['north american breeding bird survey']
north american breeding bird survey
['trends in international mathematics and science study']
trends in international mathematics and science study
[]
alzheimer s disease neuroimaging initiative adni
['coastal change analysis program']
national water level observation network
['adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
['adni', 'adni']
adni
[]
our world in data
['baltimore longitudinal study neuroimaging aging adni', 'baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['coastal change analysis program']
coastal change analysis program
['north american breeding bird survey']
coastal change analysis program
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['genome sequence of sars cov']
sars cov 2 genome sequence
['adni', 'adni s disease neuroimaging initiative adni']
adni
['adni s disease neuroimaging initiative adni']
adni
['adni']
adni
['agricultural resource management survey']
agricultural resources management survey
['adni', 'baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['early childhood longitudinal study']
early childhood longitudinal study
['adni', 'adni']
adni
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni', 'adni']
adni
['baltimore']
survey of industrial research and development
['agricultural resource management survey']
census of agriculture
[]
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
adni
['adni']
adni
['national education longitudinal']
education longitudinal study
['north american breeding bird survey', 'north american breeding bird survey']
north american breeding bird survey
[]
sars cov 2 genome sequence
['adni']
alzheimer s disease neuroimaging initiative adni
['covid open research dataset']
covid 19 open research dataset cord 19
['adni']
adni
['covid open international dataset']
sars cov 2 genome sequence
['adni', 'adni']
adni
[]
sars cov 2 genome sequence
['north american breeding bird survey']
north american breeding bird survey
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['trends in international mathematics and science study']
trends in international mathematics and science study
[]
adni
['agricultural resource management survey', 'agricultural resource management survey']
agricultural resource management survey
['national education longitudinal study']
high school longitudinal study
['baltimore longitudinal study of aging', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['adni']
alzheimer s disease neuroimaging initiative adni
['coastal change analysis program']
coastal change analysis program
['adni']
adni
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging blsa', 'baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['trends in international mathematics and science study']
trends in international mathematics and science study
['covid open research']
covid 19 open research dataset cord 19
[]
adni
['agricultural resource management survey']
agricultural resource management survey
['national education longitudinal study', 'national education students']
national education longitudinal study
['baccalaureate and beyond study and']
baccalaureate and beyond
['trends in international mathematics and science study']
trends in international mathematics and science study
['north american breeding bird survey']
north american breeding bird survey bbs
['adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['adni', 'adni']
adni
[]
adni
['adni']
adni
['adni']
adni
['adni', 'adni']
adni
['adni']
alzheimers disease neuroimaging initiative
['beginning postsecondary students study and']
beginning postsecondary students
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
[]
census of agriculture
['trends in international mathematics and science study']
trends in international mathematics and science study
[]
sars cov 2 genome sequence
[]
adni
['covid open research dataset']
covid 19 open research dataset cord 19
[]
our world in data
['adni']
adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['census of agriculture']
census of agriculture
['adni']
alzheimer s disease neuroimaging initiative adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni', 'adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
[]
agricultural resource management survey
['baltimore longitudinal study of aging', 'baltimore longitudinal study study aging blsa study']
baltimore longitudinal study of aging blsa
['adni in international of initiative adni', 'adni']
adni
['coastal change management program', 'agricultural resource management survey survey']
agricultural resource management survey
['adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['world ocean database bird']
world ocean database
['education education longitudinal study']
national education longitudinal study
['adni']
alzheimer s disease neuroimaging initiative adni
['early childhood longitudinal study']
early childhood longitudinal study
['survey of doctorate recipients']
survey of doctorate recipients
['early childhood longitudinal study']
early childhood longitudinal study
['genome']
sars cov 2 genome sequences
['ibtracs model international', 'ibtracs']
ibtracs
['census of agriculture']
census of agriculture
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging blsa
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni']
adni
['beginning postsecondary students study and']
beginning postsecondary students
['adni', 'adni']
adni
['adni']
adni
['adni']
adni
['adni']
adni
['adni']
adni
['genome sequence of sars']
genome sequence of sars cov 2
[]
ibtracs
['adni']
baltimore longitudinal study of aging
['trends in international mathematics and science study']
trends in international mathematics and science study
['survey of agriculture', 'census resource agriculture', 'trends in international mathematics', 'census of agriculture']
census of agriculture
['adni']
adni
['north american breeding bird survey']
north american breeding bird survey
['adni']
adni
['covid open research dataset']
covid 19 open research dataset
['adni']
adni
['agricultural resource management survey survey']
agricultural resource management survey
['agricultural resource management survey']
agricultural resource management survey
['adni']
adni
['adni', 'adni']
adni
['trends in international mathematics and science study']
adni
['early childhood longitudinal study']
early childhood longitudinal study
['adni', 'adni s disease neuroimaging initiative adni']
alzheimer s disease neuroimaging initiative adni
['baltimore longitudinal study of aging']
baltimore longitudinal study of aging
['baltimore longitudinal study of aging blsa']
baltimore longitudinal study of aging
['genome sequence of sars cov']
genome sequence of sars cov 2
['adni']
adni
['north american breeding bird survey']
north american breeding bird survey
['adni model research dataset']
slosh model
['adni']
adni
['adni']
adni
['trends in international mathematics and science study']
trends in international mathematics and science study
['coastal change analysis']
coastal change analysis program
['national education longitudinal']
common core of data
['north american breeding bird survey']
north american breeding bird survey
['ibtracs', 'adni', 'adni']
slosh model
['trends in international mathematics and science study']
trends in international mathematics and science study
['adni', 'adni s disease neuroimaging initiative adni', 'adni']
alzheimer s disease neuroimaging initiative adni
['adni']
adni
['adni']
alzheimer s disease neuroimaging initiative adni
['national of doctorate recipients aging']
survey of earned doctorates
tp = 0
fp = 0
fn = 0
# Score predicted dataset mentions against the labels: a prediction is a true positive if it
# matches a label exactly, or (in a second pass) if its Jaccard similarity with a remaining
# label is at least 0.5; unmatched predictions are false positives, unmatched labels false negatives.
for i in testing.values():
    prediction = i[0]
    cop = set(prediction.copy())
    true_pred = i[1].copy()
    check = False
    #check exact match first
    for j in prediction:
        if j in true_pred:
            tp += 1
            true_pred.remove(j)
            cop.discard(j)  # discard rather than remove, so duplicate predictions cannot raise KeyError
    #then check rest for jaccard score
    for j in cop:
        found = False
        removal = 0
        for k in true_pred:
            if jaccard(j, k) >= 0.5:
                found = True
                removal = k
                break
        if found:
            tp += 1
            true_pred.remove(removal)
        else:
            fp += 1
    fn += len(true_pred)_____no_output_____
</code>
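The loop above calls a `jaccard` helper that is defined earlier in the notebook and not shown in this excerpt. For reference, a minimal token-level sketch consistent with how it is called here (two label strings in, a similarity score out) could look like the following; this is an assumption about the helper, not its original definition.

<code>
def jaccard(a, b):
    """Token-level Jaccard similarity between two label strings (sketch of the assumed helper)."""
    set_a, set_b = set(a.split()), set(b.split())
    if not set_a or not set_b:
        return 0.0
    return len(set_a & set_b) / len(set_a | set_b)
</code>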
Testing Performance_____no_output_____
<code>
print("testing performance")
print("micro F score")
print(fp)
print(fn)
print(tp/(tp + 1/2*(fp+fn)))
print("accuracy")
print(tp/(tp+fn))testing performance
micro F score
291
356
0.6656330749354005
accuracy
0.644
</code>
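Note that the quantity printed under "accuracy" above, `tp/(tp+fn)`, is the micro-averaged recall. For reference, the related quantities computed from the same counters are (a sketch, not part of the original notebook):

<code>
precision = tp / (tp + fp)               # fraction of predicted mentions that were correct
recall = tp / (tp + fn)                  # printed above under the "accuracy" label
micro_f1 = tp / (tp + 0.5 * (fp + fn))   # the "micro F score" printed above
</code>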
| {
"repository": "arodriguezca/NLP-dataset",
"path": "bert_embed_seq2seq.ipynb",
"matched_keywords": [
"biomarkers"
],
"stars": null,
"size": 310624,
"hexsha": "d0c8e68cfc2a8c6f697c5519fd4a781854981fda",
"max_line_length": 29640,
"avg_line_length": 46.2375707056,
"alphanum_fraction": 0.5302262542
} |
# Notebook from STScI-MIRI/miricoord
Path: miricoord/BasicTransforms.ipynb
# A simple notebook to do MIRI coordinate transforms. #_____no_output_____Some functionality depends on having the JWST pipeline and/or pysiaf module installed_____no_output_____
<code>
import numpy as np
import pdb as pdb
from astropy.modeling import models
from asdf import AsdfFile
from jwst import datamodels
from jwst.assign_wcs import miri
import pysiaf_____no_output_____
</code>
### Imager transforms using standalone code plus pysiaf ###_____no_output_____Import the miricoord standalone code:_____no_output_____
<code>
import miricoord.imager.mirim_tools as mt_____no_output_____
</code>
Read the MIRI apertures from the SIAF_____no_output_____
<code>
siaf = pysiaf.Siaf('MIRI')#,basepath='/Users/dlaw/jwcode/pysiaf/pysiaf/pre_delivery_data/MIRI')_____no_output_____
</code>
Get the MIRIM_FULL x,y reference location from the SIAF_____no_output_____
<code>
xref,yref=siaf['MIRIM_FULL'].XDetRef,siaf['MIRIM_FULL'].YDetRef_____no_output_____
</code>
Note that these are in the SIAF 1-indexed reference frame; in order to use them we'll first have to transform them to the 0-indexed frame used by all MIRI coordinates code (and the JWST pipeline):_____no_output_____
<code>
xref,yref=xref-1,yref-1_____no_output_____xref,yref_____no_output_____
</code>
Transform them to v2,v3 for filter 'F770W'_____no_output_____
<code>
v2ref,v3ref=mt.xytov2v3(xref,yref,'F770W')_____no_output_____v2ref,v3ref_____no_output_____
</code>
This should show that the v2,v3 reference point of MIRIM_FULL (for which F770W is the reference filter) is -453.559, -373.814 (note that this changed in CDP-7)_____no_output_____We can also convert a given location to RA,DEC if we assume a few JWST attitude keywords. First import the miricoord telescope tools module:_____no_output_____
<code>
import miricoord.tel.tel_tools as teltools_____no_output_____
</code>
Let's pretend that the telescope pointing put the reference point at RA=312.5, DEC=-76.0, with a spacecraft roll of 73 degrees_____no_output_____
<code>
raref,decref,rollref=312.5,-76.0,73.0_____no_output_____
</code>
Given that, we want to know where the location v2,v3=(-400,-420) is (this is somewhere in the coronagraphs):_____no_output_____
<code>
v2,v3=[-400.],[-420.]_____no_output_____ra,dec,newroll=teltools.jwst_v2v3toradec(v2,v3,v2ref=v2ref,v3ref=v3ref,raref=raref,decref=decref,rollref=rollref)_____no_output_____
</code>
The RA,dec of this point is:_____no_output_____
<code>
ra,dec_____no_output_____
</code>
And the local roll at this new location is:_____no_output_____
<code>
newroll_____no_output_____
</code>
Note that if we instead had a FITS header with the appropriate keywords, we could have passed that to jwst_v2v3toradec instead of individual values._____no_output_____### Now let's do an imager transform using the JWST pipeline code ###_____no_output_____Import the miricoord pipeline access code:_____no_output_____
<code>
import miricoord.imager.mirim_pipetools as mpt_____no_output_____v2ref,v3ref=mpt.xytov2v3(xref,yref,'F770W')_____no_output_____v2ref,v3ref_____no_output_____
</code>
This should be the same answer as before, but under the hood it used the JWST pipeline!_____no_output_____We can also access the pipeline distortion model directly:_____no_output_____
<code>
model=mpt.xytov2v3model('F770W')_____no_output_____
</code>
And use that to do forward transforms:_____no_output_____
<code>
model(xref,yref)_____no_output_____
</code>
And backward transforms:_____no_output_____
<code>
model.inverse(v2ref,v3ref)_____no_output_____
</code>
### Now do a conversion to Ideal coordinates using the SIAF apertures: ###_____no_output_____Let's work out where v2,v3=-415.069, -400.576 is for the LRS slit_____no_output_____
<code>
v2,v3=-415.069, -400.576_____no_output_____xideal,yideal=mt.v2v3toIdeal(v2,v3,'MIRIM_SLIT')SIAF version: PRDOPSSOC-031
xideal,yideal_____no_output_____
</code>
It's 0,0, which makes sense since this was the MIRIM_SLIT reference point._____no_output_____Now see what the lower-left corner of the LRS slit corresponds to in the SIAF:_____no_output_____
<code>
xideal,yideal=siaf['MIRIM_SLIT'].XIdlVert1,siaf['MIRIM_SLIT'].YIdlVert1_____no_output_____v2,v3=mt.Idealtov2v3(xideal,yideal,'MIRIM_SLIT')SIAF version: PRDOPSSOC-031
xideal,yideal,v2,v3_____no_output_____siaf['MIRIM_SLIT'].plot()_____no_output_____
</code>
As another example, APT requires Ideal coordinate offsets from the reference point_____no_output_____If we wanted to see where an offset of XIdeal,YIdeal=10,0 in filter F2300C would land a star on the imager detector compared to the nominal Lyot coronagraph reference point in F770W:_____no_output_____
<code>
xideal,yideal=10,0_____no_output_____v2,v3=mt.Idealtov2v3(xideal,yideal,'MIRIM_CORONLYOT')SIAF version: PRDOPSSOC-031
x,y=mt.v2v3toxy(v2,v3,'F2300C')_____no_output_____print(x,y)[231.28115409] [885.2137205]
siaf['MIRIM_CORONLYOT'].plot()_____no_output_____
</code>
### Now we'll do an MRS transform using standalone code plus pysiaf ###_____no_output_____
<code>
import miricoord.mrs.mrs_tools as mrst_____no_output_____
</code>
Get the MRS v2,v3 reference point from the SIAF_____no_output_____
<code>
v2ref,v3ref=siaf['MIRIFU_CHANNEL1A'].V2Ref,siaf['MIRIFU_CHANNEL1A'].V3Ref_____no_output_____v2ref,v3ref_____no_output_____siaf['MIRIFU_CHANNEL1A'].plot()_____no_output_____
</code>
Figure out what alpha,beta this is in Channel 1A_____no_output_____
<code>
alpha,beta=mrst.v2v3toab(v2ref,v3ref,'1A')_____no_output_____alpha,beta_____no_output_____
</code>
By design, it's zero,zero since this was the reference point_____no_output_____Now find out where pixels (50,60), (55,60), and (60,60) on the SHORT detector would be for Ch1A_____no_output_____
<code>
x,y=[50,55,60],[60,60,60]_____no_output_____temp=mrst.xytoabl(x,y,'1A',trim=1)_____no_output_____temp_____no_output_____
</code>
Note that the return here is actually a dictionary of information, and that its entries are only 2 elements long even though we passed in 3 pixels. This is because we specified trim=1, which removes any values that do not correspond to a light-sensitive slice._____no_output_____
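A quick way to see the trimming (a sketch using only the keys shown above, not part of the original notebook) is to compare the number of surviving entries with the number of input pixels:

<code>
# Sketch: with trim=1 only pixels falling on a light-sensitive slice are kept
print(len(x), 'input pixels ->', len(temp['alpha']), 'pixels on a slice in Ch1A')
</code>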
<code>
v2,v3=mrst.abtov2v3(temp['alpha'],temp['beta'],'1A')_____no_output_____v2,v3_____no_output_____
</code>
### Now we'll do an MRS transform using the pipeline code ###_____no_output_____
<code>
import miricoord.mrs.mrs_pipetools as mrspt_____no_output_____x,y=30.31,511.0_____no_output_____a,b,l=mrspt.xytoabl(x,y,'1A')_____no_output_____print(a,b,l)0.4612954052724527 0.0 5.343680519509376
</code>
Be warned: using the pipeline code in this way can give strange results if you try to transform a pixel that doesn't land on a slice in the specified channel! (The pipeline itself has code elsewhere to deal with this, but here we're hooking directly into the transform modules.) A simple guard is sketched below._____no_output_____
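One way to guard against this (a sketch using the standalone tools demonstrated earlier, not a pipeline feature) is to check with `trim=1` which pixels actually fall on a slice before transforming them with the pipeline module:

<code>
# Sketch: with trim=1 the standalone transform drops pixels that do not fall on a slice,
# so the surviving entries tell you which inputs are safe to feed to mrspt.xytoabl
x_check, y_check = [50, 55, 60], [60, 60, 60]
on_slice = mrst.xytoabl(x_check, y_check, '1A', trim=1)
print(on_slice)
</code>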
| {
"repository": "STScI-MIRI/miricoord",
"path": "miricoord/BasicTransforms.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 54071,
"hexsha": "d0c9a5725cbddc7f0dce5b986a55387956a2862c",
"max_line_length": 13856,
"avg_line_length": 56.4415448852,
"alphanum_fraction": 0.8112481737
} |
# Notebook from hlab-repo/purity-and-danger
Path: Immigration.ipynb
<a href="https://colab.research.google.com/github/hlab-repo/purity-and-danger/blob/master/Immigration.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____# Creating a Model for Immigration and Outsider Language
This notebook starts with a baseline system and then provides users the opportunity to attempt to improve performance with their own custom, complete system._____no_output_____## Set-up_____no_output_____
<code>
%%capture
!pip install datasets
!pip install transformers_____no_output_____import re
from collections import Counter
import datasets
import pandas as pd
import torch
import torch.nn as nn
import torch.optim as optim
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics import accuracy_score, confusion_matrix, f1_score, precision_score, recall_score
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
from torch.utils.data import DataLoader
from transformers import BertTokenizer, BertForSequenceClassification_____no_output_____
</code>
## Getting a test dataset_____no_output_____We can start with the Common Crawl news corpus (January 2017 - December 2019). See here for details:
https://huggingface.co/datasets/cc_news
This will constitute our test dataset. Note that the pseudolabels were generated from the beginning of this dataset, but the dataset (of 708,241 news articles) was in no way exhausted. You could perhaps skip the first 20,000 or so articles to deal only with new data; a minimal sketch of how to do that follows the preview cell below._____no_output_____
<code>
# this could take several minutes
dataset = datasets.load_dataset('cc_news')_____no_output_____dataset_____no_output_____# look at the first 10 samples
for i, s in enumerate(dataset['train']):
print(s)
if i >= 10:
break{'date': '2017-04-17 00:00:00', 'description': "Officials unsealed court documents Monday (April 17) to reveal details surrounding the first searches of Prince's Paisley Park estate.", 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/04/prince-young-and-sad.jpg?w=946', 'text': 'By Abby Hassler\nOfficials unsealed court documents Monday (April 17) to reveal details surrounding the first searches of Prince’s Paisley Park estate following his untimely death.\nRelated: Prince’s Ex-Wife Mayte Garcia Says Memoir is not a Tell-All\nThe unsealed search warrants don’t confirm the source of the drug, fentanyl, that led to the 57-year-old singer’s accidental, self-administered overdose last April, according to The Star Tribune.\nInvestigators found no prescriptions in Prince’s name, however, Dr. Michael Todd Schulenberg told detectives he had written a prescription for oxycodone, which is also an opioid, under the name of long-time Prince associate and drummer Kirk Johnson.\nBetween April 21 and Sept. 19, 2016, Carver County authorities conducted investigations into Prince’s death with a total of 11 search warrants.', 'title': 'Prince Search Warrants Unsealed, Answer Few Questions', 'url': 'http://1041jackfm.cbslocal.com/2017/04/17/prince-search-warrants-unsealed-2/'}
{'date': '2017-08-14 00:00:00', 'description': '"The spirit of Green Day has always been about rising above oppression."', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/08/billie-joe-armstrong-theo-wargo-getty-images.jpg?w=946', 'text': 'By Abby Hassler\nGreen Day’s Billie Joe Armstrong has always been outspoken about his political beliefs. Following the tragedy in Charlottesville, Virgina, over the weekend, Armstrong felt the need to speak out against the white supremacists who caused much of the violence.\nRelated: Billie Joe Armstrong Wins #TBT with Childhood Studio Photo\n“My heart feels heavy. I feel like what happened in Charlottesville goes beyond the point of anger,” Armstrong wrote on Facebook. “It makes me sad and desperate. shocked. I f—— hate racism more than anything.”\n“The spirit of Green Day has always been about rising above oppression. and sticking up for what you believe in and singing it at the top of your lungs,” Armstrong continued. “We grew up fearing nuclear holocaust because of the cold war. those days are feeling way too relevant these days. these issues are our ugly past.. and now it’s coming to haunt us. always resist these doomsday politicians. and in the words of our punk forefathers .. Nazi punks f— off.”', 'title': 'Green Day’s Billie Joe Armstrong Rails Against White Nationalists', 'url': 'http://1041jackfm.cbslocal.com/2017/08/14/billie-joe-armstrong-white-nationalists/'}
{'date': '2017-02-15 00:00:00', 'description': 'The concept, which has broken YouTube records and dominated social media, remains as simple and delightful as ever. Only this time, a rotating cast of celebrities will replace James Corden.', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/02/metallica-carpool.jpg?w=946', 'text': 'By Hayden Wright\nA trailer has been released for the next sequence of Carpool Karaoke videos, and the lineup is stellar. The CBS late-night segment will stream on Apple Music in a similar format, but with new guests and even crazier moments behind the wheel. The concept, which has broken YouTube records and dominated social media, remains as simple and delightful as ever. Only this time, a rotating cast of celebrities will replace James Corden.\nRelated: Adele’s ‘Carpool Karaoke’ is the Most Popular Viral Video of 2016\nHere are the moments we’re most excited for:\nMetallica making their headbanging mark on Rihanna’s “Diamonds” with Billy Eichner.\nJohn Legend duetting on with Alicia Keys on her breakout hit, “Falling.”\nAriana Grande belting the Little Shop of Horrors soundtrack with Seth MacFarlane.\nJames Corden’s return with Will Smith — they rap the Fresh Prince theme!\nChelsea Handler slinging whiskey and singing Bon Jovi’s “Living on a Prayer” with Blake Shelton.\nCorden also presides over an epic, R. Kelly-inspired key change for the Carpool Karaoke franchise — there’s a helicopter.\nA new installment will drop on Apple Music each week and the premiere is “coming soon.”\nWatch the preview here:', 'title': 'Upcoming ‘Carpool Karaoke’ Series Looks Epic', 'url': 'http://1041jackfm.cbslocal.com/2017/02/15/carpool-karaoke-series-epic/'}
{'date': '2017-01-01 00:00:00', 'description': '"S--- happens," said Carey.', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/01/mariah-carey-getty.jpg?w=946', 'text': 'By Brian Ives\nSo, you’re Mariah Carey, one of the world’s biggest stars for a quarter of a century, and you fail on national TV, big time. How do you handle it?\nAs it turns out, you handle it with humor.\nRelated: Mariah Carey Shades Ariana Grande, Demi Lovato\nFor those just waking up now: in case you were totally absent from social media on New Year’s Eve (and if so, good for you!), Mariah Carey had a disastrous performance—or perhaps, non-performance is a better way to describe it—on Dick Clark’s New Year’s Rockin’ Eve last night. As CBS News reported, Carey “paced the stage without singing. It’s unclear what exactly were the technical difficulties, but the disaster was obvious as it unfolded live.”\nPredictably, twitter erupted with mockery, including memes about 2016 claiming its final victim (Mariah Carey’s career).\nCarey’s response was a single [NSFW] tweet, “S— happens. Have a happy and healthy new year everybody! Here’s to making more headlines in 2017.”', 'title': 'Mariah Carey Reacts to Her NYE Performance with Humor', 'url': 'http://1041jackfm.cbslocal.com/2017/01/01/mariah-carey-reacts-to-her-nye-performance-with-humor/'}
{'date': '2017-10-06 00:00:00', 'description': 'Dubbed the "Raw Sessions Versions," the tracks are prototypes for the songs we know.', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/10/queen-photo-by-rogers-express-getty-images.jpg?w=946', 'text': 'By Hayden Wright\nLast month, Queen announced a deluxe box set celebrating the 40th anniversary of their 1977 album News of the World. In the press release, the band promised “Every lead vocal is different, as are most of the lead guitar parts and a great many other instrumental details.” Now Queen have revealed the reissue versions of “We Are the Champions” and “We Will Rock You,” two of the band’s best-loved songs.\nRelated: Queen Detail ‘News of the World’ Deluxe Box Set\nDubbed the “Raw Sessions Versions,” the tracks are prototypes for the songs we know: Freddie Mercury’s vocals are a bit looser on “We Will Rock You,” which begins with a few warmup bars of singing. Brian May’s guitar solo is quite different, too. You get the sense that Queen were feeling their way through the tracks as they recorded earlier versions. The piano arrangement on “We Are the Champions” is brighter and happier.\nThe News of the World box set debuts November 17. Listen to the never-before-heard raw sessions here:', 'title': 'Queen Release Alternate Takes of Classic Songs " 104.1 Jack FM', 'url': 'http://1041jackfm.cbslocal.com/2017/10/06/queen-studio-outtakes-we-will-rock-you-we-are-the-champions/'}
{'date': '2017-02-14 00:00:00', 'description': 'Katy Perry, Paul McCarteny and Miley Cyrus are just a few of the artists to spread the love.', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/02/grammy-red-carpet-43.jpg?w=946', 'text': "By Radio.com Staff\nIt’s Valentine’s day and artists are taking to social media to show love for their fans and significant others.\nRelated: John Mayer is Cupid’s Secret Weapon\nKaty Perry, Paul McCartney and Miley Cyrus are just a few of the musicians to spread the love.\nChris Young had his tongue planted firmly in his cheek when he wished fans a “Happy Taco Tuesday,” and then there was Kesha, who loves her fans, but worries her cats will eat her. Valid concern.\nCheck out the best Valentine’s Day messages below.\n❤✨So much love for my #KatyCats on this mushy day! Thanks for keeping me floating and grounded all at the same time… twitter.com/i/web/status/8… —\nKATY PERRY (@katyperry) February 14, 2017\nAll we need is love. Happy Valentine's Day. X #ValentinesDay https://t.co/DsxieopcYJ —\nPaul McCartney (@PaulMcCartney) February 14, 2017\nHappy Valentine's Day https://t.co/p4VIPntyHx —\nKim Kardashian West (@KimKardashian) February 14, 2017\nHave a Happy Hippie Valentimezzzzz! TBTuesday to my date night with @tywrent!!!!!! It's all about L-O-V-E everyday.… twitter.com/i/web/status/8… —\nMiley Ray Cyrus (@MileyCyrus) February 14, 2017\nHappy Valentine's Day to all our fans ❤️ https://t.co/IzYZEVsyEX —\nPearl Jam (@PearlJam) February 14, 2017\nHappy Valentine's Day ❤😘😍 I love you —\nAustin Mahone (@AustinMahone) February 14, 2017\nHappy Valentines Day X Adam https://t.co/eiKiysyNE3 —\n(@U2) February 14, 2017\nHappy Valentine's Day, ya creeps. —\nMark Hoppus (@markhoppus) February 14, 2017\nI think I'm forgetting something today... Oh yeah! Happy Taco Tuesday everybody! —\n(@ChrisYoungMusic) February 14, 2017", 'title': 'Musicians Wish Fans a Happy Valentine’s Day', 'url': 'http://1041jackfm.cbslocal.com/2017/02/14/musicians-happy-valentines-day/'}
{'date': '2017-08-14 00:00:00', 'description': "The lineup for the four tour dates will feature Don Henley, Joe Walsh and Timothy B. Schmit with Frey's son Deacon and Vince Gill.", 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/08/the-eagles-kennedy-center.jpg?w=946', 'text': "By Annie Reuter\nThe Eagles have announced four new concert dates. The lineup for the short run will feature Don Henley, Joe Walsh and Timothy B. Schmit with Frey’s son, Deacon, and Vince Gill filling in for the late Glenn Frey.\nRelated: Vince Gill to Join Eagles for Classic East and West Shows\nAn Evening with the Eagles will stop at the Greensboro Coliseum in North Carolina on October 17 followed by a show a Philips Arena in Atlanta, Georgia. on October 20. The band will return to Louisville, Kentucky on October 24 at KFC Yum! Center before traveling to the late Glenn Frey’s hometown of Detroit, Michigan on October 27 to wrap up the run at Little Caesars Arena.\nTickets for the four new dates go on sale at Saturday (Aug. 19) at 10 am. An American Express card member pre-sale starts on Tuesday while VIP packages will be available through Eagles.com.\nThe four new tour dates follow the success of the band’s Classic West and East shows, earlier this summer. Deacon and Gill also played with the band for those shows.\n“Bringing Deacon in was my idea,” Don Henley told the LA Times. “I think of the guild system, which in both Eastern and Western cultures is a centuries-old tradition of the father passing down the trade to his son, and to me, that makes perfect moral and ethical sense. The primary thing is I think Glenn would be good with it—with both of these guys. I think he’d go, ‘That’s the perfect way to do this.'”", 'title': 'The Eagles Add Four New Dates to 2017 Tour " 104.1 Jack FM', 'url': 'http://1041jackfm.cbslocal.com/2017/08/14/the-eagles-2017-tour-dates/'}
{'date': '2017-02-14 00:00:00', 'description': "George Michael's manager, Michael Lippman, wanted the three artists as well as host James Corden to perform a mashup of Michael's hits.", 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/02/grammy-show-60.jpg?w=946', 'text': 'By Annie Reuter\nAdele’s tribute to George Michael on Sunday (Feb. 12) at the GRAMMY Awards was a memorable one despite an early glitch that caused her to start the song over. But the original plans for the performance were far different. At one point the tribute could have included Beyoncé and Rihanna.\nRelated: Adele Tributes George Michael after Rocky Start at 2017 GRAMMYs\nGeorge Michael’s manager, Michael Lippman, wanted the three artists as well as host James Corden to perform a mashup of Michael’s hits including “Freedom” and “One More Try,” GRAMMY executive producer Ken Ehrlich told Billboard,\nThat decision shifted when Lippman found out “how passionate Adele was,” Ehrlich says, “and that she had a vision for what she wanted to do with it.” Adele would go on to perform a ballad version of Michael’s 1996 hit “Fastlove” backed by an orchestra. It was a song she recalls hearing for the first time at the age of 10 and she instantly “heard the vulnerability in that song,” Ehrlich says.\nBackstage following her emotional GRAMMY performance, Adele raved about Michael, adding that “it was an honor” to show her respects to the singer. Later that night, Adele won GRAMMYs for Record of the Year and Album of the Year.', 'title': 'George Michael GRAMMY Tribute Plans Included Beyoncé, Rihanna', 'url': 'http://1041jackfm.cbslocal.com/2017/02/14/george-michael-grammy-tribute-plans-beyonce-rihanna/'}
{'date': '2017-02-15 00:00:00', 'description': '"In these trying times we need some fun. We’re very serious about fun.”', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/02/deborah-harry-getty.jpg?w=946', 'text': 'By Jon Wiederhorn\nBlondie have released a video for “Fun,” the first single from their upcoming album Pollinator. Directed by Beyoncé collaborator Dikyal Rimmasch, the clip lives up to the song’s title, featuring the band in a space ship, wandering another planet and animated footage of galactic travel contrasted with performance shots and images of people partying on the dance floor.\nRelated: Blondie and Garbage Announce Co-Headlining Summer Tour\nGuitarist Chris Stein said the interstellar theme was an effort to escape the stress and turmoil of modern day events. “Quoting Emma Goldman, ‘If I can’t dance I don’t want to be part of your revolution.’ In these trying times we need some fun. We’re very serious about fun,” he told NME.\n“The video was shot in two places,” he added. “The color stuff was all shot in LA without us and the black and white stuff was shot in New York. It’s got some good cameos—it’s got a pretty big cameo from Raja, who’s one of the Drag Race superstars, and little cameos from Grace McKagan, Duff McKagan’s daughter who has a band called The Pink Slips, and also Tony Maserati, who was a producer and mixer for us.”\nStein and vocalist Debbie Harry wrote the upbeat, disco-inflected “Fun” with TV On The Radio member Dave Sitek. Pollinator, which is scheduled for release May 5, also features writing by Johnny Marr, Sia, Charli XCX, The Strokes guitarist Nick Valensi and Dev Hynes. It will be Blondie’s first record since 2014’s Ghost of Download.', 'title': 'Blondie Release Galactic Video for New Song ‘Fun’', 'url': 'http://1041jackfm.cbslocal.com/2017/02/15/blondie-video-new-song-fun/'}
{'date': '2017-06-14 00:00:00', 'description': 'Check out the latest from the Las Vegas rockers.', 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://s0.wp.com/wp-content/themes/vip/cbs-local/images/global/facebook/facebook-share-260x260.png', 'text': 'The Killers have released a new single titled “The Man” from their forthcoming album, Wonderful Wonderful.\nRelated: The Killers Perform New Track, ‘Run for Cover’\nThe track is the first new music from the band since their 2012 album Battle Born (assuming you don’t count their 2016 Christmas album Don’t Waste Your Wishes). Hopefully, this signals a fifth studio effort is imminent. “The Man” was recorded with the producer Jacknife Lee during album sessions in Las Vegas and Los Angeles. The song finds frontman Brandon Flowers looking back on his younger self, the persona from their Grammy-nominated debut Hot Fuss, and reconciling that wide-eyed character with the man he is now.\nCheck out the latest from The Killers below.', 'title': 'Listen to The Killers’ New Track ‘The Man’', 'url': 'http://1041jackfm.cbslocal.com/2017/06/14/the-killers-the-man-3/'}
{'date': '2017-06-14 00:00:00', 'description': "The song is featured in the upcoming film 'The Book of Henry.'", 'domain': '1041jackfm.cbslocal.com', 'image_url': 'https://cbs1041jackfm.files.wordpress.com/2017/06/stevienickspress.jpg?w=946', 'text': 'By Abby Hassler\nStevie Nicks debuted a new ballad “Your Hand I Will Never Let Go” today (June 14). The track is featured in the Naomi Wats-drama, The Book of Henry, which will hit theaters this Friday (June 16).\nRelated: Lana Del Rey Taps Stevie Nicks for New Track: Report\nWritten by Thomas Barlett and Ryan Miller, Nicks’ song will fall alongside original music composed by Michael Giacchino on the film’s soundtrack.\n“Drowned in thought and caught in a stare/ Talking to ghosts who were not there,” Nicks sings. “Then you took my hand/ Transformation began/ Commotion where it once was still/ Fireworks explode/ Front row tickets to the show/ This hand I will never let it go.”\nListen to “Your Hand I Will Never Let It Go” below.', 'title': 'Stevie Nicks Debuts New Single ‘Your Hand I Will Never Let It Go’', 'url': 'http://1041jackfm.cbslocal.com/2017/06/14/stevie-nicks-your-hand-i-will-never-let-it-go/'}
</code>
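As noted above, the pseudolabels were derived from the beginning of this corpus. A minimal sketch (not part of the original notebook) that uses the `datasets` API already loaded above to skip roughly the first 20,000 articles, so that only unseen articles remain:

<code>
# Sketch: drop the first ~20,000 articles that informed the pseudolabels
unseen = dataset['train'].select(range(20_000, len(dataset['train'])))
print(len(unseen))
</code>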
## Getting pseudo-labeled data for training
`0` represents viral language, `1` immigration language, and `2` a blend of the two. These categorizations are fuzzy and inexact and are not the result of manual annotations. They should be improved upon during the training process (or adjusted manually) when possible._____no_output_____
<code>
df = pd.read_csv('https://www.dropbox.com/s/kfbja23kisimedm/immigration.csv?dl=1')
df.head()_____no_output_____X_train, X_valid, y_train, y_valid = train_test_split(df['text'], df['target'], train_size=0.7, random_state=42)_____no_output_____X_valid, y_valid_____no_output_____
</code>
# Baseline 1
Let's use Naive Bayes. For the sake of simplicity, I will not add weighting to the classes here (we probably should!). Note that sklearn expects its weights per *sample* in the training set (when using the `fit` method), so you would need to feed in a list of weights the same length as your samples, with each sample's weight determined by its class, like this:
| Sample | Class | Weight |
| --- | --- | --- |
| sample 1 | 1 | 0.05 |
| sample 2 | 2 | 0.8 |
| sample 3 | 1 | 0.05 |
| sample 4 | 0 | 0.15 |
| sample 5 | 2 | 0.8 |_____no_output_____
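If you did want to try it, a minimal sketch (illustrative only, with weights derived from the label frequencies rather than tuned) could look like this:

```python
from sklearn.naive_bayes import MultinomialNB

# Inverse-frequency weight per class, normalised (same idea as in Baseline 2 below).
class_weights = 1 / y_train.value_counts(normalize=True)
class_weights = class_weights / class_weights.sum()

# Expand to one weight per training sample, as fit(sample_weight=...) expects.
sample_weights = y_train.map(class_weights)

# train_vectorized is produced by the TF-IDF cell below.
weighted_nb = MultinomialNB().fit(train_vectorized, y_train, sample_weight=sample_weights)
```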
<code>
vectorizer = TfidfVectorizer()
train_vectorized = vectorizer.fit_transform(X_train)
valid_vectorized = vectorizer.transform(X_valid)
train_vectorized_____no_output_____naive_bayes = MultinomialNB()
naive_bayes.fit(train_vectorized, y_train)_____no_output_____predictions = naive_bayes.predict(valid_vectorized)
predictions_____no_output_____print(f'Accuracy: {accuracy_score(y_valid, predictions)}\n'
f'Precision: {precision_score(y_valid, predictions, average=None)}\n'
f'Recall: {recall_score(y_valid, predictions, average=None)}\n'
f'F1 Score: {f1_score(y_valid, predictions, average=None)}\n')Accuracy: 0.9649855444908448
Precision: [0.99604156 0.95005945 0. ]
Recall: [0.92424242 0.99875 0. ]
F1 Score: [0.95879971 0.97379647 0. ]
# y-axis (rows) == true label and x-axis (columns) == predicted label
confusion_matrix(y_valid, predictions)_____no_output_____
</code>
# Baseline 2_____no_output_____
<code>
model = BertForSequenceClassification.from_pretrained('bert-large-uncased', num_labels=3)
tokenizer = BertTokenizer.from_pretrained('bert-large-uncased')_____no_output_____device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = model.to(device)_____no_output_____# the classes are extremely unbalanced; let's generate weights that we can feed to loss function
unbalanced_weights = 1 / (y_train.value_counts() / len(y_train)).sort_index()
weights = unbalanced_weights / unbalanced_weights.sum()
weights_____no_output_____# I will exclude datasets, dataloaders, etc. for the sake of simplicity
criterion = nn.CrossEntropyLoss(weight=torch.tensor(weights.values).float().to(device))
optimizer = optim.AdamW(model.parameters(), lr=1e-5)
for epoch in range(1): # make this up to 3!
running_loss = 0.
for batch_start in range(0, len(X_train), 4):
X = X_train[batch_start:batch_start + 4].tolist()
y = torch.tensor(y_train[batch_start:batch_start + 4].values).to(device)
predictions = model(**tokenizer(X, return_tensors='pt', padding=True).to(device))
        loss = criterion(predictions.logits, y)  # CrossEntropyLoss expects raw logits (it applies log-softmax internally)
optimizer.zero_grad()
loss.backward()
optimizer.step()
running_loss += loss.item()
print(f'Finished epoch {epoch} with running loss of {running_loss / len(X_train)}')_____no_output_____# make predictions on validation set
valid_predictions = torch.zeros_like(torch.tensor(y_valid.values))
for batch_start in range(0, len(X_valid), 4):
X = X_valid[batch_start:batch_start + 4].tolist()
with torch.no_grad():
predictions = model(**tokenizer(X, return_tensors='pt', padding=True).to(device))
indices = torch.argmax(torch.softmax(predictions.logits, dim=-1), dim=-1)
valid_predictions[batch_start:batch_start + 4] = indices_____no_output_____print(f'Accuracy: {accuracy_score(y_valid, valid_predictions.numpy())}\n'
f'Precision: {precision_score(y_valid, valid_predictions.numpy(), average=None)}\n'
f'Recall: {recall_score(y_valid, valid_predictions.numpy(), average=None)}\n'
f'F1 Score: {f1_score(y_valid, valid_predictions.numpy(), average=None)}\n')_____no_output_____# y-axis (rows) == true label and x-axis (columns) == predicted label
confusion_matrix(y_valid, valid_predictions.numpy())_____no_output_____
</code>
# Your Original System
Improve upon the baselines above. Feel free to copy cells from one of the baselines above, paste it here, and tweak it for improvements. You have several models to select from in sklearn (both for classification and for vectorization of text). And even just trying different architectures for Baseline 2 (such as RoBERTa, DistilBERT, etc.) would help; a hedged sketch of swapping the encoder follows._____no_output_____
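For example, swapping the encoder is mostly a two-line change; here is a hedged sketch (the checkpoint names are the standard Hugging Face ones, and the training and evaluation loops from Baseline 2 would be reused unchanged):

```python
from transformers import AutoModelForSequenceClassification, AutoTokenizer

checkpoint = 'distilbert-base-uncased'  # or 'roberta-base', etc.
model = AutoModelForSequenceClassification.from_pretrained(checkpoint, num_labels=3)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = model.to(device)  # then rerun the Baseline 2 training and evaluation cells
```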
<code>
_____no_output_____
</code>
| {
"repository": "hlab-repo/purity-and-danger",
"path": "Immigration.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 40385,
"hexsha": "d0c9f4f0d6b9014cf034517250a57c2deab3f379",
"max_line_length": 2105,
"avg_line_length": 55.7803867403,
"alphanum_fraction": 0.5897734307
} |
# Notebook from peter-cudmore/seminars
Path: ANZIAM-2019/ANZIAM Slides.ipynb
<code>
import matplotlib.pyplot as plt
import BondGraphTools
from BondGraphTools.config import config
from BondGraphTools.reaction_builder import Reaction_Network
julia = config.julia_____no_output_____
</code>
# `BondGraphTools`
## Modelling Network Bioenergetics.
https://github.com/peter-cudmore/seminars/ANZIAM-2019
Dr. Peter Cudmore.
Systems Biology Laboratory,
The School of Chemical and Biomedical Engineering,
The University of Melbourne. _____no_output_____In this talk I discuss
* Problems in compartmental modelling of cellular processes,
* Some solutions from engineering and physics,
* Software to make life easier.
For this talk, a *model* is a set of ordinary differential equations._____no_output_____## Part 1: Problems in Cellular Process Modelling._____no_output_____<center> <img src="images/map.png"> </center>_____no_output_____<center><i>Parameters (or the lack thereof) are the bane of the mathematical biologist's existence!</i></center>_____no_output_____Consider $S + E = ES = E + P$ (one 'edge' of metabolism).
Assuming mass action, the dynamics are:
$$\begin{align}
\dot{[S]} &= k_1^b[ES] - k_1^f[E][S],\\
\dot{[E]} &= (k_1^b + k_2^f)[ES] - [E](k_1^f[S] + k_2^b[P]),\\
\dot{[ES]} &= -(k_1^b + k_2^f)[ES] + [E](k_1^f[S] + k_2^b[P]),\\
\dot{[P]} &= k_2^f[ES] - k_2^b[E][P].
\end{align}
$$_____no_output_____*What are the kinetic parameters $k_1^b, k_1^f, k_2^b, k_2^f$?*_____no_output_____*How do we find them for large systems?*_____no_output_____Can kinetic parameters be estimated from available data?
_____no_output_____<center><h3>No!</h3></center> _____no_output_____
When fitting data to a system of kinetics, parameters may be:
- unobservable (for example, rate constants from equilibrium data),
- or *sloppy* (always underdetermined for a set of data)._____no_output_____Do kinetic parameters generalise across different experiments (and hence can be tabulated)?
_____no_output_____<center><h3>No!</h3></center> _____no_output_____
At the very least kinetic parameters fail to generalise across temperatures.
For example, physiological conditions are around $37^\circ C$ while labs are around $21^\circ C$._____no_output_____Do kinetic parameters violate the laws of physics?_____no_output_____
<center><h3>Often!</h3></center>
_____no_output_____
_The principle of detailed balance_ requires that at equilibrium each simple process should be balanced by its reverse process.
This puts constraints on the kinetic parameters which are often broken, for example, by setting $k^b_i = 0$ (no back reaction for that step)._____no_output_____An alternative is to think of kinetic parameters as derived from physical constants!
For instance:
- Oster, G. and Perelson, A. and Katchalsy, A. *Network Thermodynamics*. Nature 1971; 234:5329.
- Erderer, M. and Gilles, E. D. *Thermodynamically Feasible Kinetic Models of Reaction Networks*. Biophys J. 2007; 92(6): 1846–1857.
- Saa, P. A. and Nielsen, L. K. *Construction of feasible and accurate kinetic models of metabolism: A Bayesian approach.* Sci Rep. 2016;6:29635._____no_output_____# Part 2: A Solution via Network Thermodynamics_____no_output_____<center> <img src="images/map.png"> </center>_____no_output_____### Network Thermodynamics
Partitions $S + B = P$ into _components_.
1. Energy Storage.
2. Dissipative Processes.
3. Conservation Laws.
Here:
- Chem. potential acts like _pressure_ or _voltage_.
- Molar flow acts like _velocity_ or _current_._____no_output_____
<code>
# Reaction Network from BondGraphTools
reaction = Reaction_Network(
"S + B = P", name="One Step Reaction"
)
model = reaction.as_network_model()
# Basic Visualisation.
from BondGraphTools import draw
figure = draw(model, size=(8,6))_____no_output_____
</code>
### Kinetic parameters from Network Thermodynamics._____no_output_____Partitions $S + B = P$ into _components_.
1. Energy Storage Compartments:
$S$, $B$ and $P$.
2. Dissipative Processes: (The chemical reaction itself).
3. Conservation Laws: (The flow from $S$ is the flow from $B$ and is equal to the flow into the reaction)._____no_output_____Gibbs Energy:
$$\mathrm{d}G = V\mathrm{d}P - S\mathrm{d}T + \sum_i\mu_i\mathrm{d}n_i$$
- $\mu_i$ is the Chemical Potential of species $A_i$
- $n_i$ is the amount (in mols) of $A_i$_____no_output_____Chemical Potential is usually modelled as
$$\mu_i = PT\ln\left(\frac{k_in_i}{V}\right)$$ where $$k_i = \frac{1}{c^\text{ref}}\exp\left[\frac{\mu_i^\text{ref}}{PT}\right]$$_____no_output_____
- $P$, $T$ and $V$ are pressure, temperature and volume respectively.
- $c^\text{ref}$ is the reference concentration (often $10^{-9} \text{mol/L}$).
- $\mu_i^\text{ref}$ is the reference potential._____no_output_____<center><i> The reference potential can be tabulated, approximated and (in some cases) directly measured!</i> </center>_____no_output_____Reaction flow $v$ is assumed to obey the Marcelin-de Donder formula:
$$ v \propto \exp\left(\frac{A_f}{PT}\right) - \exp\left(\frac{A_r}{PT}\right)$$
where $A_f, A_r$ are the forward and reverse chemical affinities.
For $S + B = P$, $$A_f = \mu_S + \mu_B \quad \text{and}\quad A_r = \mu_P.$$
_____no_output_____### Kinetic parameters from Network Thermodynamics.
For the equation $S + B= P$, in thermodynamic parameters:
$$
\dot{[P]} = \kappa(k_Sk_B[S][B] - k_P[P])
$$_____no_output_____Here $$k^f = \kappa k_Sk_B, \quad k^b = \kappa k_P, \quad \text{with} \quad k_i = \frac{1}{c^\text{ref}}\exp\left[\frac{\mu_i^\text{ref}}{PT}\right], \quad \kappa > 0.$$_____no_output_____*$k^f$ and $k^b$ are now related to physical constants!*_____no_output_____# Part 3: Network Thermodynamics with `BondGraphTools`_____no_output_____<center> <img src="images/map.png"> </center>_____no_output_____
<code>
import BondGraphTools
help(BondGraphTools)Help on package BondGraphTools:
NAME
BondGraphTools
DESCRIPTION
BondGraphTools
==============
BondGraphTools is a python library for symbolic modelling and control of multi-physics
systems using bond graphs. Bond graph modelling is a network base framework concerned
with the distribution of energy and the flow of power through lumped-element or
compartmental models of physical system.
Package Documentation::
https://bondgraphtools.readthedocs.io/
Source::
https://github.com/BondGraphTools/BondGraphTools
Bug reports:
https://github.com/BondGraphTools/BondGraphTools/issues
Simple Example
--------------
Build and simulate a RLC driven RLC circuit::
import BondGraphTools as bgt
# Create a new model
model = bgt.new(name="RLC")
# Create components
# 1 Ohm Resistor
resistor = bgt.new("R", name="R1", value=1.0)
# 1 Henry Inductor
inductor = bgt.new("L", name="L1", value=1.0)
# 1 Farad Capacitor
capacitor = bgt.new("C", name="C1", value=1.0)
# Conservation Law
law = bgt.new("0") # Common voltage conservation law
# Connect the components
connect(law, resistor)
connect(law, capacitor)
connect(law, inductor)
# produce timeseries data
t, x = simulate(model, x0=[1,1], timespan=[0, 10])
Bugs
----
Please report any bugs `here <https://github.com/BondGraphTools/BondGraphTools/issues>`_,
or fork the repository and submit a pull request.
License
-------
Released under the Apache 2.0 License::
Copyright (C) 2018
Peter Cudmore <[email protected]>
PACKAGE CONTENTS
actions
algebra
atomic
base
component_manager
compound
config
ds_model
exceptions
fileio
port_hamiltonian
reaction_builder
sim_tools
version
view
FILE
/Users/pete/Workspace/BondGraphTools/BondGraphTools/__init__.py
</code>
### Network Energetics
Network Energetics, which includes Network Thermodynamics, partitions $S + B = P$ into _components_.
1. Energy Storage Compartments: $C: S$, $C: B$ and $C: P$.
2. Dissipative Processes: $R: r_{1;0}$
3. Conservation Laws: $1$ (common flow).
*Bond Graphs are representations of energy networks.*_____no_output_____
<code>
#from BondGraphTools.reaction_builder
# import Reaction_Network
reaction = Reaction_Network(
"S + B = P", name="One Step Reaction"
)
model = reaction.as_network_model()
# Basic Visualisation.
from BondGraphTools import draw
figure = draw(model, size=(8,6))
_____no_output_____# Initialise latex printing
from sympy import init_printing
init_printing()
# Print the equations of motion
model.constitutive_relations_____no_output_____figure_____no_output_____for v in model.state_vars:
(component, local_v) = model.state_vars[v]
meta_data = component.state_vars[local_v]
print(f"{v} is {component}\'s {meta_data}")x_0 is C: S's Molar Amount
x_1 is C: B's Molar Amount
x_2 is C: P's Molar Amount
#
#
figure_____no_output_____
</code>
### A peek under the hood
In `BondGraphTools`, components have a number of power ports defined such that the power $P_i$ entering the $i$th port is $P_i = e_if_i$.
The power variables $(e,f)$ are related to the component state variables $(x, \mathrm{d}x)$ by _constitutive relations_._____no_output_____
<code>
from BondGraphTools import new
chemical_potential = new("Ce", library="BioChem")
chemical_potential.constitutive_relations_____no_output_____figure_____no_output_____
</code>
##### A peek under the hood (cont.)
The harpoons represent _shared power variables_.
For example, the harpoon from port 1 on $R: r_{1;0}$ to $C:P$ indicates that
$$
f^{C:P}_0 = - f^{R:r_{1;0}}_1 \qquad
e^{C:P}_0 = e^{R:r_{1;0}}_1
$$
so that power is conserved through the connection
$$
P^{C:P}_0 + P^{R:r_{1;0}}_1 = 0
$$
_____no_output_____
<code>
figure_____no_output_____
</code>
##### A peek under the hood (cont.)
The resulting system is of the form:
$$
LX + V(X) = 0
$$
where
$$
X = \left(\frac{\mathrm{d}\mathbf{x}}{\mathrm{d}t},
\mathbf{e},
\mathbf{f},
\mathbf{x},
\mathbf{u}\right)^T
$$
- $\mathbf{e},\mathbf{f}$ vectors of power variables
- $\mathrm{d}\mathbf{x}, \mathbf{x}$ similarly state variables.
- and $\mathbf{u}$ control variables
- $L$ is a sparse matrix
- $V$ is a nonlinear vector field._____no_output_____### So what does this have to do with parameters?
Having a network energetics library allows for:
- automation via dataflow scripting (e.g., `requests`, `xlrd`),
- 'computational modularity' for model/parameter reuse,
- integration with existing parameter estimators,
- use with your favourite data analysis technology.
*Enables the tabulation of parameters!*_____no_output_____### Why Python?
- open source, commonly available and commonly used
- 'executable pseudocode' (easy to read) and excellent for rapid development
- great libraries for both science and general purpose computing
- excellent quality management tools
- package management, version control_____no_output_____*`BondGraphTools` is developed with sustainable development practices.*_____no_output_____## `BondGraphTools` now and in the future._____no_output_____#### Current Version
Version 0.3.6 (on PyPI) is being used in the Systems Biology Lab by:
- Prof. Peter Gawthrop (mitochondrial electron transport).
- Michael Pan (ionic homeostasis).
- Myself (coupled oscillators, synthetic biology)_____no_output_____#### Features Already Implemented
- Component libraries (mechatronic and biochem)
- Numerical simulations,
- Control variables,
- Stoichiometric analysis_____no_output_____The big challenge: _scaling up_!_____no_output_____# Thanks for Listening!
Thanks to:
- Prof. Edmund Crampin, Prof. Peter Gawthrop, Michael Pan & The Systems Biology Lab.
- The University of Melbourne
- The ARC Center of Excellence for Convergent Bio-Nano Science.
- ANZIAM Organisers and session chairs, and Victoria University._____no_output_____Please check out `BondGraphTools`
- documentation at [bondgraphtools.readthedocs.io](http://bondgraphtools.readthedocs.io)
- source at [https://github.com/BondGraphTools](https://github.com/BondGraphTools)_____no_output_____
| {
"repository": "peter-cudmore/seminars",
"path": "ANZIAM-2019/ANZIAM Slides.ipynb",
"matched_keywords": [
"synthetic biology",
"biology",
"systems biology"
],
"stars": null,
"size": 132189,
"hexsha": "d0cc72a50408427c3e38a0d35618ae48a9837f2c",
"max_line_length": 16808,
"avg_line_length": 110.3414023372,
"alphanum_fraction": 0.8697924941
} |
# Notebook from kbroman/Teaching_CTC2019tutorial
Path: CTC2019_tutorial.ipynb
# Mapping QTL in BXD mice using R/qtl2
[Karl Broman](https://kbroman.org)
[<img style="display:inline-block;" src="https://orcid.org/sites/default/files/images/orcid_16x16(1).gif">](https://orcid.org/0000-0002-4914-6671),
[Department of Biostatistics & Medical Informatics](https://www.biostat.wisc.edu),
[University of Wisconsin–Madison](https://www.wisc.edu)
Our aim in this tutorial is to demonstrate how to map quantitative trait loci (QTL) in the BXD mouse recombinant inbred lines using the [R/qtl2](https://kbroman.org/qtl2) software. We will first show how to download BXD phenotypes from [GeneNetwork2](http://gn2.genenetwork.org) using its API, via the R package [R/GNapi](https://github.com/rqtl/GNapi). At the end, we will use the [R/qtl2browse](https://github.com/rqtl/qtl2browse) package to display genome scan results using the [Genetics Genome Browser](https://github.com/chfi/purescript-genome-browser)._____no_output_____## Acquiring phenotypes with the GeneNetwork API
We will first use the [GeneNetwork2](http://gn2.genenetwork.org) API to acquire BXD phenotypes to use for mapping. We will use the R package [R/GNapi](https://github.com/rqtl/GNapi).
We first need to install the package, which is not available on [CRAN](https://cran.r-project.org), but is available via a private repository.
```r
install.packages("GNapi", repos="http://rqtl.org/qtl2cran")
```
We then load the package using `library()`._____no_output_____
<code>
library(GNapi)_____no_output_____
</code>
The [R/GNapi](https://github.com/kbroman/GNapi) package has a variety of functions. For an overview, see [its vignette](http://kbroman.org/GNapi/GNapi.html). Here we will just do one thing: use the function `get_pheno()` to grab BXD phenotype data. You provide a data set and a phenotype. Phenotype 10038 concerns "habituation", measured as a difference in locomotor activity between day 1 and day 3 in a 5 minute test trial. _____no_output_____
<code>
phe <- get_pheno("BXD", "10038")
head(phe)_____no_output_____
</code>
We will use just the column "value", but we need to include the strain names so that R/qtl2 can line up these phenotypes with the genotypes._____no_output_____
<code>
pheno <- setNames(phe$value, phe$sample_name)
head(pheno)_____no_output_____
</code>
## Acquiring genotype data with R/qtl2
We now want to get genotype data for the BXD panel. We first need to install the [R/qtl2](https://kbroman.org/qtl2) package. As with R/GNapi, it is not available on CRAN, but rather is distributed via a private repository.
```r
install.packages("qtl2", repos="http://rqtl.org/qtl2cran")
```
We then load the package with `library()`._____no_output_____
<code>
library(qtl2)_____no_output_____
</code>
R/qtl2 uses a special file format for QTL data ([described here](https://kbroman.org/qtl2/assets/vignettes/input_files.html)). There are a variety of sample datasets [on Github](https://github.com/rqtl/qtl2data), including genotypes for the [mouse BXD lines](https://github.com/rqtl/qtl2data/tree/master/BXD), taken from [GeneNetwork2](http://gn2.genenetwork.org). We'll load those data directly into R using the function `read_cross2()`._____no_output_____
<code>
bxd_file <- "https://raw.githubusercontent.com/rqtl/qtl2data/master/BXD/bxd.zip"
bxd <- read_cross2(bxd_file)Warning message in recode_geno(sheet, genotypes):
“117497 genotypes treated as missing: "H"”
</code>
We get a warning message about heterozygous genotypes being omitted. A number of the newer BXD lines have considerable heterozygosity. But these lines weren't among those phenotyped in the data we downloaded above, and so we don't need to worry about it here.
The data are read into the object `bxd`, which has class `"cross2"`. It contains the genotypes and well as genetic and physical marker maps. There are also phenotype data (which we will ignore).
We can get a quick summary of the dataset with `summary()`. For reasons that I don't understand, it gets printed as a big mess within this Jupyter notebook, and so here we need to surround it with `print()` to get the intended output._____no_output_____
<code>
print( summary(bxd) )Object of class cross2 (crosstype "risib")
Total individuals 198
No. genotyped individuals 198
No. phenotyped individuals 198
No. with both geno & pheno 198
No. phenotypes 5806
No. covariates 0
No. phenotype covariates 1
No. chromosomes 20
Total markers 7320
No. markers by chr:
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 X
636 583 431 460 470 449 437 319 447 317 375 308 244 281 247 272 291 250 310 193
</code>
## QTL mapping in R/qtl2
The first step in QTL analysis is to calculate genotype probabilities at putative QTL positions across the genome, conditional on the observed marker data. This allows us to consider positions between the genotyped markers and to allow for the presence of genotyping errors.
First, we need to define the positions that we will consider. We will take the observed marker positions and insert a set of "pseudomarkers" (marker-like positions that are not actually markers). We do this with the function `insert_pseudomarkers()`. We pull the genetic map (`gmap`) out of the `bxd` data as our basic map; `step=0.2` and `stepwidth="max"` mean to insert pseudomarkers so that no two adjacent markers or pseudomarkers are more than 0.2 cM apart. That is, in any marker interval that is greater than 0.2 cM, we will insert one or more evenly spaced pseudomarkers, so that the intervals between markers and pseudomarkers are no more than 0.2 cM._____no_output_____
<code>
gmap <- insert_pseudomarkers(bxd$gmap, step=0.2, stepwidth="max")_____no_output_____
</code>
We will be interested in results with respect to the physical map (in Mbp), and so we need to create a corresponding map that includes the pseudomarker positions. We do this with the function `interp_map()`, which uses linear interpolation to get estimated positions for the inserted pseudomarkers._____no_output_____
<code>
pmap <- interp_map(gmap, bxd$gmap, bxd$pmap)_____no_output_____
</code>
We can now proceed with calculating genotype probabilities for all BXD strains at all markers and pseudomarkers, conditional on the observed marker genotypes and assuming a 0.2% genotyping error rate. We use the [Carter-Falconer](https://doi.org/10.1007/BF02996226) map function to convert between cM and recombination fractions; it assumes a high degree of crossover interference, appropriate for the mouse._____no_output_____
<code>
pr <- calc_genoprob(bxd, gmap, error_prob=0.002, map_function="c-f")_____no_output_____
</code>
In the QTL analysis, we will fit a linear mixed model to account for polygenic background effects. We will use the "leave one chromosome out" (LOCO) method for this. When we scan a chromosome for a QTL, we include a polygenic term with a kinship matrix derived from all other chromosomes.
We first need to calculate this set of kinship matrices, which we do with the function `calc_kinship()`. The second argument, `"loco"`, indicates that we want to calculate a vector of kinship matrices, each derived from the genotype probabilities but leaving one chromosome out._____no_output_____
<code>
k <- calc_kinship(pr, "loco")_____no_output_____
</code>
Now, finally, we're ready to perform the genome scan, which we do with the function `scan1()`. It takes the genotype probabilities and a set of phenotypes (here, just one phenotype). If kinship matrices are provided (here, as `k`), the scan is performed using a linear mixed model. To make the calculations faster, the residual polygenic variance is first estimated without including any QTL effect and is then taken to be fixed and known during the scan._____no_output_____
<code>
out <- scan1(pr, pheno, k)_____no_output_____
</code>
The output of `scan1()` is a matrix of LOD scores; the rows are marker/pseudomarker positions and the columns are phenotypes. We can plot the results using `plot.scan1()`, and we can just use `plot()` because it uses the class of its input to determine what plot to make.
Here I'm using the package [repr](https://cran.r-project.org/package=repr) to control the height and width of the plot that's created. I installed it with `install.packages("repr")`. You can ignore that part, if you want._____no_output_____
<code>
library(repr)
options(repr.plot.height=4, repr.plot.width=8)
par(mar=c(5.1, 4.1, 0.6, 0.6))
plot(out, pmap)_____no_output_____
</code>
There's a clear QTL on chromosome 15. We can make a plot of just that chromosome with the argument `chr=15`._____no_output_____
<code>
par(mar=c(5.1, 4.1, 0.6, 0.6))
plot(out, pmap, chr=15)_____no_output_____
</code>
Let's create a plot of the phenotype vs the genotype at the inferred QTL. We first need to identify the QTL location, which we can do using `max()`. We then use `maxmarg()` to get inferred genotypes at the inferred QTL._____no_output_____
<code>
mx <- max(out, pmap)
g_imp <- maxmarg(pr, pmap, chr=mx$chr, pos=mx$pos, return_char=TRUE)_____no_output_____
</code>
We can use `plot_pxg()` to plot the phenotype as a function of QTL genotype. We use `swap_axes=TRUE` to have the phenotype on the x-axis and the genotype on the y-axis, rather than the other way around. Here we see that the BB and DD genotypes are completely separated, phenotypically. _____no_output_____
<code>
par(mar=c(5.1, 4.1, 0.6, 0.6))
plot_pxg(g_imp, pheno, swap_axes=TRUE, xlab="Habituation phenotype")_____no_output_____
</code>
## Browsing genome scan results with the Genetics Genome Browser
The [Genetics Genome Browser](https://github.com/chfi/purescript-genome-browser) is a fast, lightweight, PureScript-based genome browser developed for browsing GWAS or QTL analysis results. We'll use the R package [R/qtl2browse](https://github.com/rqtl/qtl2browse) to view our QTL mapping results in the GGB.
We first need to install the R/qtl2browse package, again from a private [CRAN](https://cran.r-project.org)-like repository.
```r
install.packages("qtl2browse", repos="http://rqtl.org/qtl2cran")
```
We then load the package and use its one function, `browse()`, which takes the `scan1()` output and corresponding physical map (in Mbp). This will open the Genetics Genome Browser in a separate tab in your web browser._____no_output_____
<code>
library(qtl2browse)
browse(out, pmap)_____no_output_____
</code>
| {
"repository": "kbroman/Teaching_CTC2019tutorial",
"path": "CTC2019_tutorial.ipynb",
"matched_keywords": [
"biostatistics"
],
"stars": null,
"size": 127933,
"hexsha": "d0ceaa5acba4d5501fb8c1038d9dd26d02dd7b9d",
"max_line_length": 48388,
"avg_line_length": 247.9321705426,
"alphanum_fraction": 0.8956797699
} |
# Notebook from saponas/hail
Path: hail/python/hail/docs/tutorials/07-matrixtable.ipynb
## MatrixTable Tutorial
If you've gotten this far, you're probably thinking:
- "Can't I do all of this in `pandas` or `R`?"
- "What does this have to do with biology?"
The two crucial features that Hail adds are _scalability_ and the _domain-specific primitives_ needed to work easily with biological data. Fear not! You've learned most of the basic concepts of Hail and now are ready for the bit that makes it possible to represent and compute on genetic matrices: the [MatrixTable](https://hail.is/docs/0.2/hail.MatrixTable.html)._____no_output_____In the last example of the [Table Joins Tutorial](https://hail.is/docs/0.2/tutorials/08-joins.html), the ratings table had a compound key: `movie_id` and `user_id`. The ratings were secretly a movie-by-user matrix!
However, since this matrix is very sparse, it is reasonably represented in a so-called "coordinate form" `Table`, where each row of the table is an entry of the sparse matrix. For large and dense matrices (like sequencing data), the per-row overhead of coordinate representations is untenable. That's why we built `MatrixTable`, a 2-dimensional generalization of `Table`._____no_output_____### MatrixTable Anatomy
Recall that `Table` has two kinds of fields:
- global fields
- row fields
`MatrixTable` has four kinds of fields:
- global fields
- row fields
- column fields
- entry fields_____no_output_____Row fields are fields that are stored once per row. These can contain information about the rows, or summary data calculated per row.
Column fields are stored once per column. These can contain information about the columns, or summary data calculated per column.
Entry fields are the piece that makes this structure a matrix -- there is an entry for each (row, column) pair._____no_output_____### Importing and Reading
Like tables, matrix tables can be [imported](https://hail.is/docs/0.2/methods/impex.html) from a variety of formats: VCF, (B)GEN, PLINK, TSV, etc. Matrix tables can also be *read* from a "native" matrix table format. Let's read a sample of prepared [1KG](https://en.wikipedia.org/wiki/1000_Genomes_Project) data._____no_output_____
<code>
import hail as hl
from bokeh.io import output_notebook, show
output_notebook()
hl.utils.get_1kg('data/')_____no_output_____mt = hl.read_matrix_table('data/1kg.mt')
mt.describe()_____no_output_____
</code>
There are a few things to note:
- There is a single column field `s`. This is the sample ID from the VCF. It is also the column key.
- There is a compound row key: `locus` and `alleles`.
- `locus` has type `locus<GRCh37>`
- `alleles` has type `array<str>`
- GT has type `call`. That's a genotype call!_____no_output_____Whereas table expressions could be indexed by nothing or indexed by rows, matrix table expressions have four options: nothing, indexed by row, indexed by column, or indexed by row and column (the entries). Let's see some examples._____no_output_____
<code>
mt.s.describe()_____no_output_____mt.GT.describe()_____no_output_____
</code>
### MatrixTable operations
We belabored the operations on tables because they all have natural analogs (sometimes several) on matrix tables. For example:
- `count` => `count_{rows, cols}` (and `count` which returns both)
- `filter` => `filter_{rows, cols, entries}`
- `annotate` => `annotate_{rows, cols, entries}` (and globals for both)
- `select` => `select_{rows, cols, entries}` (and globals for both)
- `transmute` => `transmute_{rows, cols, entries}` (and globals for both)
- `group_by` => `group_{rows, cols}_by`
- `explode` => `explode_{rows, cols}`
- `aggregate` => `aggregate_{rows, cols, entries}`_____no_output_____Some operations are unique to `MatrixTable`:
- The row fields can be accessed as a `Table` with [rows](https://hail.is/docs/0.2/hail.MatrixTable.html#hail.MatrixTable.rows)
- The column fields can be accessed as a `Table` with [cols](https://hail.is/docs/0.2/hail.MatrixTable.html#hail.MatrixTable.cols).
- The entire field space of a `MatrixTable` can be accessed as a coordinate-form `Table` with [entries](https://hail.is/docs/0.2/hail.MatrixTable.html#hail.MatrixTable.entries). Be careful with this! While it's fast to aggregate or query, trying to write this `Table` to disk could produce files _thousands of times larger_ than the corresponding `MatrixTable`.
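As a small illustration of the dimension-specific verbs listed above (not part of the original tutorial; it assumes the `mt` loaded earlier):

```python
# One Table-style verb per dimension: rows (variants), columns (samples), entries.
biallelic = mt.filter_rows(hl.len(mt.alleles) == 2)
labelled  = mt.annotate_cols(cohort='1kg')
n_called  = mt.aggregate_entries(hl.agg.count_where(hl.is_defined(mt.GT)))
```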
Let's explore `mt` using these tools. Let's get the size of the dataset._____no_output_____
<code>
mt.count() # (rows, cols)_____no_output_____
</code>
Let's look at the first few row keys (variants) and column keys (sample IDs)._____no_output_____
<code>
mt.rows().select().show()_____no_output_____mt.s.show()_____no_output_____
</code>
Let's investigate the genotypes and the call rate. Let's look at the first few genotypes:_____no_output_____
<code>
mt.GT.show()_____no_output_____
</code>
All homozygous reference, which is not surprising. Let's look at the distribution of genotype calls:_____no_output_____
<code>
mt.aggregate_entries(hl.agg.counter(mt.GT.n_alt_alleles()))_____no_output_____
</code>
Let's compute the overall call rate directly, and then plot the distribution of call rate per variant._____no_output_____
<code>
mt.aggregate_entries(hl.agg.fraction(hl.is_defined(mt.GT)))_____no_output_____
</code>
Here's a nice trick: you can use an aggregator inside `annotate_rows` and it will aggregate over columns, that is, summarize the values in the row using the aggregator. Let's compute and plot call rate per variant._____no_output_____
<code>
mt2 = mt.annotate_rows(call_rate = hl.agg.fraction(hl.is_defined(mt.GT)))
mt2.describe()_____no_output_____p = hl.plot.histogram(mt2.call_rate, range=(0,1.0), bins=100,
title='Variant Call Rate Histogram', legend='Call Rate')
show(p)_____no_output_____
</code>
### Exercise: GQ vs DP
In this exercise, you'll use Hail to investigate a strange property of sequencing datasets.
The `DP` field is the sequencing depth (the number of reads).
Let's first plot a histogram of `DP`:_____no_output_____
<code>
p = hl.plot.histogram(mt.DP, range=(0,40), bins=40, title='DP Histogram', legend='DP')
show(p)_____no_output_____
</code>
Now, let's do the same thing for GQ.
The `GQ` field is the phred-scaled "genotype quality". The formula to convert to a linear-scale confidence (0 to 1) is `10 ** -(mt.GQ / 10)`. GQ is truncated to lie between 0 and 99.
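As a quick aside (a sketch, not part of the original tutorial), that formula can be applied to every entry with ordinary expression arithmetic:

```python
# Attach the linear-scale value from the formula above as a new entry field.
mt_gq = mt.annotate_entries(gq_linear = 10 ** -(mt.GQ / 10))
```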
_____no_output_____
<code>
p = hl.plot.histogram(mt.GQ, range=(0,100), bins=100, title='GQ Histogram', legend='GQ')
show(p)_____no_output_____
</code>
Whoa! That's a strange distribution! There's a big spike at 100. The rest of the values have roughly the same shape as the DP distribution, but form a [Dimetrodon](https://en.wikipedia.org/wiki/Dimetrodon). Use Hail to figure out what's going on!_____no_output_____
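One way to start digging (a sketch, not the intended solution) is to split the GQ histogram by genotype class and compare the shapes:

```python
# GQ restricted to hom-ref calls; repeat with ~mt.GT.is_hom_ref() for the rest.
p = hl.plot.histogram(mt.filter_entries(mt.GT.is_hom_ref()).GQ,
                      range=(0, 100), bins=100,
                      title='GQ Histogram (hom-ref calls)', legend='GQ')
show(p)
```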
| {
"repository": "saponas/hail",
"path": "hail/python/hail/docs/tutorials/07-matrixtable.ipynb",
"matched_keywords": [
"biology"
],
"stars": 789,
"size": 12091,
"hexsha": "d0d005b24b0d8e850c77c449c71a5ce157210425",
"max_line_length": 378,
"avg_line_length": 25.9463519313,
"alphanum_fraction": 0.5705896948
} |
# Notebook from gAldeia/itea-python
Path: examples/interacting_with_protodash.ipynb
# Interacting with ProtoDash_____no_output_____In this notebook we'll combine the ProtoDash and the Partial Effects to obtain feature importances on the digits classifications task.
ProtoDash was proposed in _Gurumoorthy, Karthik & Dhurandhar, Amit & Cecchi, Guillermo & Aggarwal, Charu. (2019). Efficient Data Representation by Selecting Prototypes with Importance Weights. 260-269. 10.1109/ICDM.2019.00036_._____no_output_____
<code>
import numpy as np
import pandas as pd
# automatically differentiable implementation of numpy
import jax.numpy as jnp # v0.2.13
import shap #0.34.0
from sklearn.metrics import classification_report
from sklearn import datasets
from sklearn.model_selection import train_test_split
from IPython.display import display
import matplotlib.pyplot as plt
from itea.classification import ITEA_classifier
from itea.inspection import *
from sklearn.preprocessing import OneHotEncoder
from aix360.algorithms.protodash import ProtodashExplainer #0.2.1
import warnings
warnings.filterwarnings(action='ignore', module=r'itea')_____no_output_____digits_data = datasets.load_digits(n_class=10)
X, y = digits_data['data'], digits_data['target']
labels = digits_data['feature_names']
targets = digits_data['target_names']
X /= X.max(axis=1).reshape(-1, 1)
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.33, random_state=42)
tfuncs = {
'id' : lambda x: x,
'sin': jnp.sin,
'cos': jnp.cos,
'tan': jnp.tan
}
clf = ITEA_classifier(
gens = 100,
popsize = 100,
max_terms = 40,
expolim = (0, 2),
verbose = 10,
tfuncs = tfuncs,
labels = labels,
simplify_method = None,
random_state = 42,
fit_kw = {'max_iter' : 5}
).fit(X_train, y_train)
final_itexpr = clf.bestsol_
final_itexpr.selected_features_gen | smallest fitness | mean fitness | highest fitness | remaining time
----------------------------------------------------------------------------
0 | 0.105569 | 0.105569 | 0.105569 | 9min40sec
10 | 0.105569 | 0.105569 | 0.105569 | 9min8sec
20 | 0.105569 | 0.105669 | 0.107232 | 7min4sec
30 | 0.107232 | 0.111380 | 0.133001 | 6min41sec
40 | 0.133001 | 0.146708 | 0.152120 | 6min11sec
50 | 0.152120 | 0.154530 | 0.227764 | 6min19sec
60 | 0.227764 | 0.227839 | 0.230258 | 5min34sec
70 | 0.324190 | 0.335553 | 0.351621 | 4min23sec
80 | 0.351621 | 0.396259 | 0.428928 | 5min27sec
90 | 0.444722 | 0.467548 | 0.517872 | 2min53sec
print(classification_report(
y_test,
final_itexpr.predict(X_test),
target_names=[str(t) for t in targets]
)) precision recall f1-score support
0 0.54 0.64 0.58 55
1 0.70 0.80 0.75 55
2 0.53 0.44 0.48 52
3 0.29 0.71 0.41 56
4 0.69 0.69 0.69 64
5 0.61 0.52 0.56 73
6 0.58 0.49 0.53 57
7 0.43 0.53 0.48 62
8 0.40 0.19 0.26 52
9 0.27 0.04 0.08 68
accuracy 0.50 594
macro avg 0.51 0.51 0.48 594
weighted avg 0.51 0.50 0.48 594
</code>
We can use the ``ITEA_summarizer`` to inspect the convergence during the evolution. In the cell below, we'll create 3 plots, one for the fitness (classification accuracy), one for the complexity (number of nodes if the IT expression was converted to a symbolic tree) and number of terms (number of IT terms of the solutions in the population for each generation)._____no_output_____
<code>
fig, ax = plt.subplots(3, 1, figsize=(10, 8), sharex=True)
summarizer = ITEA_summarizer(itea=clf).fit(X_train, y_train).plot_convergence(
data=['fitness', 'complexity', 'n_terms'],
ax=ax,
show=False
)
plt.tight_layout()
plt.show()_____no_output_____# features are named pixel_x_y. Lets extract those coordinates and
# paint in a figure to show the selected features
selected_features = np.zeros((8, 8))
for feature_name, feature_importance in zip(
final_itexpr.selected_features_,
np.sum(final_itexpr.feature_importances_, axis=0)
):
x, y = feature_name[-3], feature_name[-1]
selected_features[int(x), int(y)] = feature_importance
fig, axs = plt.subplots(1, 1, figsize=(3,3))
axs.imshow(selected_features, cmap='gray_r')
axs.set_title(f"Selected features")
plt.tight_layout()
plt.show()_____no_output_____onehot_encoder = OneHotEncoder(sparse=False)
onehot_encoded = onehot_encoder.fit_transform(
np.hstack( (X_train, y_train.reshape(-1, 1)) ) )
explainer = ProtodashExplainer()
# call protodash explainer. We'll select 10 prototypes
# S contains indices of the selected prototypes
# W contains importance weights associated with the selected prototypes
(W, S, _) = explainer.explain(onehot_encoded, onehot_encoded, m=10)elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
from matplotlib import cm
fig, axs = plt.subplots(2, 5, figsize=(12,5))
# Showing 10 prototypes
for s, ax in zip(S, fig.axes):
ax.imshow(X_train[s].reshape(8, 8), cmap='gray_r')
ax.set_title(f"Prototype of class {y_train[s]}")
Z = X_train[s].reshape(8, 8)
levels = [0.1, 0.2, 0.4]
norm = cm.colors.Normalize(vmax=abs(Z).max(), vmin=-abs(Z).max())
cmap = cm.PRGn
cset2 = ax.contour(Z, levels, colors='y')
for c in cset2.collections:
c.set_linestyle('solid')
plt.tight_layout()
plt.show()_____no_output_____it_explainer = ITExpr_explainer(
itexpr=final_itexpr,
tfuncs=tfuncs
).fit(X_train, y_train)
fig, axs = plt.subplots(2, 5, figsize=(12,5))
for s, ax in zip(S, fig.axes):
importances = it_explainer.average_partial_effects(X_train[s, :].reshape(1, -1))[y_train[s]]
ax.imshow(importances.reshape(8, 8), cmap='gray_r')
ax.set_title(f"Feature importance\nprototype of class {y_train[s]}")
Z = X_train[s].reshape(8, 8)
levels = [0.1, 0.2, 0.4]
norm = cm.colors.Normalize(vmax=abs(Z).max(), vmin=-abs(Z).max())
cmap = cm.PRGn
cset2 = ax.contour(Z, levels, colors='y')
for c in cset2.collections:
c.set_linestyle('solid')
plt.tight_layout()
plt.show()_____no_output_____shap_explainer = shap.KernelExplainer(
final_itexpr.predict,
shap.sample(pd.DataFrame(X_train, columns=labels), 100)
)
fig, axs = plt.subplots(2, 5, figsize=(12,5))
for s, ax in zip(S, fig.axes):
importances = np.abs(shap_explainer.shap_values(
X_train[s, :].reshape(1, -1), silent=True, l1_reg='num_features(10)'))
ax.imshow(importances.reshape(8, 8), cmap='gray_r')
ax.set_title(f"Feature importance\nprototype of class {y_train[s]}")
Z = X_train[s].reshape(8, 8)
levels = [0.1, 0.2, 0.4]
norm = cm.colors.Normalize(vmax=abs(Z).max(), vmin=-abs(Z).max())
cmap = cm.PRGn
cset2 = ax.contour(Z, levels, colors='y')
for c in cset2.collections:
c.set_linestyle('solid')
plt.tight_layout()
plt.show()_____no_output_____it_explainer = ITExpr_explainer(
itexpr=final_itexpr,
tfuncs=tfuncs
).fit(X_train, y_train)
fig, axs = plt.subplots(2, 5, figsize=(12,5))
for c, ax in zip(final_itexpr.classes_, fig.axes):
c_idx = np.array([i for i in range(len(y_train)) if y_train[i]==c])
importances = it_explainer.average_partial_effects(X_train[c_idx, :])[c]
ax.imshow(importances.reshape(8, 8), cmap='gray_r')
ax.set_title(f"Feature importance\nprototype of class {c}")
Z = X_train[c_idx, :].mean(axis=0).reshape(8, 8)
levels = [0.1, 0.2, 0.4]
norm = cm.colors.Normalize(vmax=abs(Z).max(), vmin=-abs(Z).max())
cmap = cm.PRGn
cset2 = ax.contour(Z, levels, colors='y')
for c in cset2.collections:
c.set_linestyle('solid')
plt.tight_layout()
plt.show()_____no_output_____
</code>
| {
"repository": "gAldeia/itea-python",
"path": "examples/interacting_with_protodash.ipynb",
"matched_keywords": [
"evolution"
],
"stars": null,
"size": 580590,
"hexsha": "d0d1a7f0c5d1f93e517e08933f5f1f7992f99030",
"max_line_length": 128232,
"avg_line_length": 1168.1891348089,
"alphanum_fraction": 0.9529650872
} |
# Notebook from akh20/The-Android-App-Market-on-Google-Play
Path: notebook.ipynb
## 1. Google Play Store apps and reviews
<p>Mobile apps are everywhere. They are easy to create and can be lucrative. Because of these two factors, more and more apps are being developed. In this notebook, we will do a comprehensive analysis of the Android app market by comparing over ten thousand apps in Google Play across different categories. We'll look for insights in the data to devise strategies to drive growth and retention.</p>
<p><img src="https://assets.datacamp.com/production/project_619/img/google_play_store.png" alt="Google Play logo"></p>
<p>Let's take a look at the data, which consists of two files:</p>
<ul>
<li><code>apps.csv</code>: contains all the details of the applications on Google Play. There are 13 features that describe a given app.</li>
<li><code>user_reviews.csv</code>: contains 100 reviews for each app, <a href="https://www.androidpolice.com/2019/01/21/google-play-stores-redesigned-ratings-and-reviews-section-lets-you-easily-filter-by-star-rating/">most helpful first</a>. The text in each review has been pre-processed and attributed with three new features: Sentiment (Positive, Negative or Neutral), Sentiment Polarity and Sentiment Subjectivity.</li>
</ul>_____no_output_____
<code>
# Read in dataset
import pandas as pd
apps_with_duplicates = pd.read_csv('datasets/apps.csv')
# Drop duplicates
apps = apps_with_duplicates.drop_duplicates()
# Print the total number of apps
print('Total number of apps in the dataset = ', len(apps['App']))
# Have a look at a random sample of 5 rows
n = 5
apps.sample(n)Total number of apps in the dataset = 9659
</code>
## 2. Data cleaning
<p>The four features that we will be working with most frequently henceforth are <code>Installs</code>, <code>Size</code>, <code>Rating</code> and <code>Price</code>. The <code>info()</code> function (from the previous task) told us that <code>Installs</code> and <code>Price</code> columns are of type <code>object</code> and not <code>int64</code> or <code>float64</code> as we would expect. This is because the column contains some characters more than just [0,9] digits. Ideally, we would want these columns to be numeric as their name suggests. <br>
Hence, we now proceed to data cleaning and prepare our data to be consumed in our analysis later. Specifically, the presence of special characters (<code>, $ +</code>) in the <code>Installs</code> and <code>Price</code> columns makes their conversion to a numerical data type difficult.</p>_____no_output_____
<code>
# List of characters to remove
chars_to_remove = ['+' , ',' , '$']
# List of column names to clean
cols_to_clean = ['Installs' , 'Price']
# Loop for each column
for col in cols_to_clean:
# Replace each character with an empty string
for char in chars_to_remove:
apps[col] = apps[col].str.replace(char, '')
# Convert col to numeric
apps[col] = pd.to_numeric(apps[col])_____no_output_____
</code>
## 3. Exploring app categories
<p>With more than 1 billion active users in 190 countries around the world, Google Play continues to be an important distribution platform to build a global audience. For businesses to get their apps in front of users, it's important to make them more quickly and easily discoverable on Google Play. To improve the overall search experience, Google has introduced the concept of grouping apps into categories.</p>
<p>This brings us to the following questions:</p>
<ul>
<li>Which category has the highest share of (active) apps in the market? </li>
<li>Is any specific category dominating the market?</li>
<li>Which categories have the fewest number of apps?</li>
</ul>
<p>We will see that there are <code>33</code> unique app categories present in our dataset. <em>Family</em> and <em>Game</em> apps have the highest market prevalence. Interestingly, <em>Tools</em>, <em>Business</em> and <em>Medical</em> apps are also at the top.</p>_____no_output_____
<code>
import plotly
plotly.offline.init_notebook_mode(connected=True)
import plotly.graph_objs as go
# Print the total number of unique categories
num_categories = len(apps["Category"].unique())
print('Number of categories = ', num_categories)
# Count the number of apps in each 'Category' and sort them in descending order
num_apps_in_category = apps["Category"].value_counts().sort_values(ascending = False)
data = [go.Bar(
x = num_apps_in_category.index, # index = category name
y = num_apps_in_category.values, # value = count
)]
plotly.offline.iplot(data)_____no_output_____
</code>
## 4. Distribution of app ratings
<p>After having witnessed the market share for each category of apps, let's see how all these apps perform on average. App ratings (on a scale of 1 to 5) impact the discoverability, conversion of apps as well as the company's overall brand image. Ratings are a key performance indicator of an app.</p>
<p>From our research, we found that the average rating across all app categories is <code>4.17</code>. The histogram is heavily concentrated at the high end, indicating that the majority of the apps are highly rated, with only a few exceptions among the low-rated apps.</p>_____no_output_____
<code>
# Average rating of apps
avg_app_rating = apps['Rating'].mean()
print('Average app rating = ', avg_app_rating)
# Distribution of apps according to their ratings
data = [go.Histogram(
x = apps['Rating']
)]
# Vertical dashed line to indicate the average app rating
layout = {'shapes': [{
'type' :'line',
'x0': avg_app_rating,
'y0': 0,
'x1': avg_app_rating,
'y1': 1000,
'line': { 'dash': 'dashdot'}
}]
}
plotly.offline.iplot({'data': data, 'layout': layout})Average app rating = 4.173243045387994
</code>
## 5. Size and price of an app
<p>Let's now examine app size and app price. For size, if the mobile app is too large, it may be difficult and/or expensive for users to download. Lengthy download times could turn users off before they even experience your mobile app. Plus, each user's device has a finite amount of disk space. For price, some users expect their apps to be free or inexpensive. These problems compound if the developing world is part of your target market; especially due to internet speeds, earning power and exchange rates.</p>
<p>How can we effectively come up with strategies to size and price our app?</p>
<ul>
<li>Does the size of an app affect its rating? </li>
<li>Do users really care about system-heavy apps or do they prefer light-weighted apps? </li>
<li>Does the price of an app affect its rating? </li>
<li>Do users always prefer free apps over paid apps?</li>
</ul>
<p>We find that the majority of top rated apps (rating over 4) range from 2 MB to 20 MB. We also find that the vast majority of apps price themselves under \$10.</p>_____no_output_____
<code>
%matplotlib inline
import seaborn as sns
sns.set_style("darkgrid")
apps_with_size_and_rating_present = apps[(apps['Rating'].notnull()) & (apps["Size"].notnull())]
# Subset for categories with at least 250 apps
large_categories = apps_with_size_and_rating_present.groupby('Category').filter(lambda x: len(x) >= 250).reset_index()
# Plot size vs. rating
plt1 = sns.jointplot(x = large_categories['Size'] , y = large_categories['Rating'] , kind = 'hex')
# Subset out apps whose type is 'Paid'
paid_apps = apps_with_size_and_rating_present[apps_with_size_and_rating_present['Type'] == 'Paid']
# Plot price vs. rating
plt2 = sns.jointplot(x = paid_apps['Price'] , y = paid_apps['Rating'] )_____no_output_____
</code>
## 6. Relation between app category and app price
<p>So now comes the hard part. How are companies and developers supposed to make ends meet? What monetization strategies can companies use to maximize profit? The costs of apps are largely based on features, complexity, and platform.</p>
<p>There are many factors to consider when selecting the right pricing strategy for your mobile app. It is important to consider the willingness of your customer to pay for your app. A wrong price could break the deal before the download even happens. Potential customers could be turned off by what they perceive to be a shocking cost, or they might delete an app they’ve downloaded after receiving too many ads or simply not getting their money's worth.</p>
<p>Different categories demand different price ranges. Some apps that are simple and used daily, like the calculator app, should probably be kept free. However, it would make sense to charge for a highly-specialized medical app that diagnoses diabetic patients. Below, we see that <em>Medical and Family</em> apps are the most expensive. Some medical apps extend even up to \$80! All game apps are reasonably priced below \$20.</p>_____no_output_____
<code>
import matplotlib.pyplot as plt
fig, ax = plt.subplots()
fig.set_size_inches(15, 8)
# Select a few popular app categories
popular_app_cats = apps[apps.Category.isin(['GAME', 'FAMILY', 'PHOTOGRAPHY',
'MEDICAL', 'TOOLS', 'FINANCE',
'LIFESTYLE','BUSINESS'])]
# Examine the price trend by plotting Price vs Category
ax = sns.stripplot(x = popular_app_cats['Price'], y = popular_app_cats['Category'], jitter=True, linewidth=1)
ax.set_title('App pricing trend across categories')
# Apps whose Price is greater than 200
apps_above_200 = popular_app_cats[['Category', 'App', 'Price']][popular_app_cats['Price'] > 200]
apps_above_200_____no_output_____
</code>
## 7. Filter out "junk" apps
<p>It looks like a bunch of the really expensive apps are "junk" apps. That is, apps that don't really have a purpose. Some app developer may create an app called <em>I Am Rich Premium</em> or <em>most expensive app (H)</em> just for a joke or to test their app development skills. Some developers even do this with malicious intent and try to make money by hoping people accidentally click purchase on their app in the store.</p>
<p>Let's filter out these junk apps and re-do our visualization. The distribution of apps under \$20 becomes clearer.</p>_____no_output_____
<code>
# Select apps priced below $100
apps_under_100 = popular_app_cats[popular_app_cats['Price'] < 100]
fig, ax = plt.subplots()
fig.set_size_inches(15, 8)
# Examine price vs category with the authentic apps
ax = sns.stripplot(x=apps_under_100['Price'], y=apps_under_100['Category'], data=apps_under_100,
jitter=True, linewidth=1)
ax.set_title('App pricing trend across categories after filtering for junk apps')_____no_output_____
</code>
## 8. Popularity of paid apps vs free apps
<p>For apps in the Play Store today, there are five types of pricing strategies: free, freemium, paid, paymium, and subscription. Let's focus on free and paid apps only. Some characteristics of free apps are:</p>
<ul>
<li>Free to download.</li>
<li>Main source of income often comes from advertisements.</li>
<li>Often created by companies that have other products and the app serves as an extension of those products.</li>
<li>Can serve as a tool for customer retention, communication, and customer service.</li>
</ul>
<p>Some characteristics of paid apps are:</p>
<ul>
<li>Users are asked to pay once for the app to download and use it.</li>
<li>The user can't really get a feel for the app before buying it.</li>
</ul>
<p>Are paid apps installed as much as free apps? It turns out that paid apps have a relatively lower number of installs than free apps, though the difference is not as stark as I would have expected!</p>_____no_output_____
<code>
trace0 = go.Box(
# Data for paid apps
y=apps[apps['Type'] == 'Paid']['Installs'],
name = 'Paid'
)
trace1 = go.Box(
# Data for free apps
y=apps[apps['Type'] == 'Free']['Installs'],
name = 'Free'
)
layout = go.Layout(
title = "Number of downloads of paid apps vs. free apps",
yaxis = dict(
type = 'log',
autorange = True
)
)
# Add trace0 and trace1 to a list for plotting
data = [trace0 , trace1]
plotly.offline.iplot({'data': data, 'layout': layout})_____no_output_____
</code>
## 9. Sentiment analysis of user reviews
<p>Mining user review data to determine how people feel about your product, brand, or service can be done using a technique called sentiment analysis. User reviews for apps can be analyzed to identify if the mood is positive, negative or neutral about that app. For example, positive words in an app review might include words such as 'amazing', 'friendly', 'good', 'great', and 'love'. Negative words might be words like 'malware', 'hate', 'problem', 'refund', and 'incompetent'.</p>
<p>By plotting sentiment polarity scores of user reviews for paid and free apps, we observe that free apps receive a lot of harsh comments, as indicated by the outliers on the negative y-axis. Reviews for paid apps appear never to be extremely negative. This may indicate something about app quality, i.e., paid apps being of higher quality than free apps on average. The median polarity score for paid apps is a little higher than free apps, thereby syncing with our previous observation.</p>
<p>In this notebook, we analyzed over ten thousand apps from the Google Play Store. We can use our findings to inform our decisions should we ever wish to create an app ourselves.</p>_____no_output_____
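<p>The <code>Sentiment_Polarity</code> and <code>Sentiment_Subjectivity</code> scores in <code>user_reviews.csv</code> come pre-computed. As a hedged illustration of how such a polarity score can be produced for a raw review (this assumes the <code>textblob</code> package and is not necessarily how the dataset was generated):</p>

```python
from textblob import TextBlob  # assumed extra dependency, not used elsewhere in this notebook

review = "Love this app, but the constant ads are a real problem."  # made-up example text
print(TextBlob(review).sentiment.polarity)      # in [-1, 1]: negative to positive
print(TextBlob(review).sentiment.subjectivity)  # in [0, 1]: objective to subjective
```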
<code>
# Load user_reviews.csv
reviews_df = pd.read_csv('datasets/user_reviews.csv')
# Join and merge the two dataframe
merged_df = pd.merge(apps, reviews_df, on = 'App', how = "inner")
# Drop NA values from Sentiment and Translated_Review columns
merged_df = merged_df.dropna(subset=['Sentiment', 'Translated_Review'])
sns.set_style('ticks')
fig, ax = plt.subplots()
fig.set_size_inches(11, 8)
# User review sentiment polarity for paid vs. free apps
ax = sns.boxplot(x = merged_df['Type'], y = merged_df['Sentiment_Polarity'], data = merged_df)
ax.set_title('Sentiment Polarity Distribution')_____no_output_____
</code>
| {
"repository": "akh20/The-Android-App-Market-on-Google-Play",
"path": "notebook.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 1,
"size": 591921,
"hexsha": "d0d25338139e52b8437b6884e240904a93a9bb45",
"max_line_length": 591921,
"avg_line_length": 591921,
"alphanum_fraction": 0.7638333494
} |
# Notebook from Avriliar/data_analysis
Path: charpter7_erercise.ipynb
<code>
import numpy as np
import pandas as pd_____no_output_____string_data=pd.Series(['aardvark','artichoke',np.nan,'avocado'])_____no_output_____string_data_____no_output_____string_data.isnull()_____no_output_____string_data[0]=None_____no_output_____string_data.isnull()_____no_output_____from numpy import nan as NA_____no_output_____data=pd.Series([1,NA,3.5,NA,7])_____no_output_____data.dropna()_____no_output_____data[data.notnull()]_____no_output_____data=pd.DataFrame([[1.,6.5,3.],[1.,NA,NA],[NA,NA,NA],[NA,6.5,3.]])_____no_output_____cleaned=data.dropna()_____no_output_____data_____no_output_____cleaned_____no_output_____data.dropna(how='all')_____no_output_____data[4]=NA_____no_output_____data_____no_output_____data.dropna(axis=1,how='all')_____no_output_____df=pd.DataFrame(np.random.randn(7,3))_____no_output_____df.iloc[:4,1]=NA_____no_output_____df.iloc[:2,2]=NA_____no_output_____df_____no_output_____df.dropna()_____no_output_____df.dropna(thresh=2)_____no_output_____df.fillna(0)_____no_output_____df.fillna({1:0.5,2:0})_____no_output______=df.fillna(0,inplace=True)_____no_output_____df_____no_output_____df=pd.DataFrame(np.random.randn(6,3))_____no_output_____df.iloc[2:,1]_____no_output_____df.iloc[4:,2]_____no_output_____df_____no_output_____df.fillna(method='ffill')_____no_output_____df.fillna(method='ffill',limit=2)_____no_output_____data=pd.Series([1.,NA,3.5,NA,7])_____no_output_____df.fillna(data.mean())_____no_output_____data=pd.DataFrame({'k1':['one','two']*3+['two'],'k2':[1,1,2,3,3,4,4]})_____no_output_____data.duplicated()_____no_output_____data.drop_duplicates()_____no_output_____data['v1']=range(7)_____no_output_____data.drop_duplicates(['k1'])_____no_output_____data.drop_duplicates(['k1','k2'],keep='last')_____no_output_____data = pd.DataFrame({'food': ['bacon', 'pulled pork', 'bacon',
....: 'Pastrami', 'corned beef', 'Bacon',
....: 'pastrami', 'honey ham', 'nova lox'],
....: 'ounces': [4, 3, 12, 6, 7.5, 8, 3, 5, 6]})_____no_output_____data_____no_output_____meat_to_animal = {
'bacon': 'pig',
'pulled pork': 'pig',
'pastrami': 'cow',
'corned beef': 'cow',
'honey ham': 'pig',
'nova lox': 'salmon'
}_____no_output_____lowercased = data['food'].str.lower()_____no_output_____lowercased_____no_output_____data['animal'] = lowercased.map(meat_to_animal)_____no_output_____data_____no_output_____data = pd.Series([1., -999., 2., -999., -1000., 3.])_____no_output_____data_____no_output_____data.replace([-999, -1000], np.nan)_____no_output_____data.replace([-999, -1000], [np.nan, 0])_____no_output_____data.replace({-999: np.nan, -1000: 0})_____no_output_____data = pd.DataFrame(np.arange(12).reshape((3, 4)),
....: index=['Ohio', 'Colorado', 'New York'],
....: columns=['one', 'two', 'three', 'four'])_____no_output_____transform = lambda x: x[:4].upper()_____no_output_____data.index.map(transform)_____no_output_____data.index = data.index.map(transform)_____no_output_____data_____no_output_____data.rename(index=str.title, columns=str.upper)_____no_output_____data.rename(index={'OHIO': 'INDIANA'},
....: columns={'three': 'peekaboo'})_____no_output_____data.rename(index={'OHIO':'INDIANA'},inplace=True)_____no_output_____data_____no_output_____ages=[20,22,25,27,21,23,37,31,61,45,41,32]_____no_output_____bins=[18,25,35,60,100]_____no_output_____cats=pd.cut(ages,bins)_____no_output_____cats_____no_output_____cats.codes_____no_output_____cats.categories_____no_output_____pd.value_counts(cats)_____no_output_____pd.value_counts(cats)_____no_output_____pd.cut(ages,[18,25,35,60,100],right=False)_____no_output_____group_names = ['Youth', 'YoungAdult', 'MiddleAged', 'Senior']_____no_output_____pd.cut(ages,bins,labels=group_names)_____no_output_____data=np.random.rand(20)_____no_output_____pd.cut(data,4,precision=2)_____no_output_____data=np.random.randn(1000)_____no_output_____cats=pd.qcut(data,4)_____no_output_____cats_____no_output_____pd.value_counts(cats)_____no_output_____pd.qcut(data,[0,0.1,0.5,0.9,1.])_____no_output_____data=pd.DataFrame(np.random.randn(1000,4))_____no_output_____data.describe()_____no_output_____col=data[2]_____no_output_____col[np.abs(col)>3]_____no_output_____data[(np.abs(data)>3).any(1)]_____no_output_____data[(np.abs(data)>3)]=np.sign(data)*3_____no_output_____data.describe()_____no_output_____np.sign(data).head()_____no_output_____df=pd.DataFrame(np.arange(5*4).reshape((5,4)))_____no_output_____sampler=np.random.permutation(5)_____no_output_____sampler_____no_output_____df_____no_output_____df.take(sampler)_____no_output_____df.sample(n=3)_____no_output_____choices=pd.Series([5,7,-1,6,4])_____no_output_____draws=choices.sample(n=10,replace=True)_____no_output_____draws_____no_output_____df = pd.DataFrame({'key': ['b', 'b', 'a', 'c', 'a', 'b'],
.....: 'data1': range(6)})_____no_output_____pd.get_dummies(df['key'])_____no_output_____dummies=pd.get_dummies(df['key'],prefix='key')_____no_output_____df_with_dummy=df[['data1']].join(dummies)_____no_output_____df_with_dummy_____no_output_____mnames=['movie_id','title','genres']_____no_output_____movies = pd.read_table('datasets/movielens/movies.dat', sep='::',
.....: header=None, names=mnames)c:\users\zusi\appdata\local\programs\python\python37\lib\site-packages\ipykernel_launcher.py:2: FutureWarning: read_table is deprecated, use read_csv instead.
c:\users\zusi\appdata\local\programs\python\python37\lib\site-packages\ipykernel_launcher.py:2: ParserWarning: Falling back to the 'python' engine because the 'c' engine does not support regex separators (separators > 1 char and different from '\s+' are interpreted as regex); you can avoid this warning by specifying engine='python'.
movies[:10]_____no_output_____all_genres=[]_____no_output_____for x in movies.genres:all_genres.extend(x.split('|'))_____no_output_____genres=pd.unique(all_genres)_____no_output_____genres_____no_output_____zero_matrix=np.zeros((len(movies),len(genres)))_____no_output_____dummies=pd.DataFrame(zero_matrix,columns=genres)_____no_output_____gen=movies.genres[0]_____no_output_____gen.split('|')_____no_output_____dummies.columns.get_indexer(gen.split('|'))_____no_output_____for i, gen in enumerate(movies.genres):
indices = dummies.columns.get_indexer(gen.split('|'))
dummies.iloc[i, indices] = 1_____no_output_____movies_windic = movies.join(dummies.add_prefix('Genre_'))_____no_output_____movies_windic.iloc[0]_____no_output_____np.random.seed(12345)_____no_output_____values = np.random.rand(10)_____no_output_____values_____no_output_____bins = [0, 0.2, 0.4, 0.6, 0.8, 1]_____no_output_____pd.get_dummies(pd.cut(values, bins))_____no_output_____val = 'a,b, guido'_____no_output_____val.split(',')_____no_output_____pieces = [x.strip() for x in val.split(',')]_____no_output_____pieces _____no_output_____first, second, third = pieces_____no_output_____first + '::' + second + '::' + third_____no_output_____'::'.join(pieces)_____no_output_____'guido' in val_____no_output_____val.index(',')_____no_output_____val.find(':')_____no_output_____val.index(':')_____no_output_____val.count(',')_____no_output_____val.replace(',', '::')_____no_output_____val.replace(',', '')_____no_output_____import re_____no_output_____text = "foo bar\t baz \tqux"_____no_output_____re.split('\s+', text)_____no_output_____regex = re.compile('\s+')_____no_output_____regex.split(text)_____no_output_____regex.findall(text)_____no_output_____text = """Dave [email protected]
Steve [email protected]
Rob [email protected]
Ryan [email protected]
"""
pattern = r'[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}'
# re.IGNORECASE makes the regex case-insensitive
regex = re.compile(pattern, flags=re.IGNORECASE)_____no_output_____regex.findall(text)_____no_output_____m = regex.search(text)_____no_output_____m_____no_output_____text[m.start():m.end()]_____no_output_____print(regex.match(text))None
print(regex.sub('REDACTED', text))Dave REDACTED
Steve REDACTED
Rob REDACTED
Ryan REDACTED
pattern = r'([A-Z0-9._%+-]+)@([A-Z0-9.-]+)\.([A-Z]{2,4})'_____no_output_____regex = re.compile(pattern, flags=re.IGNORECASE)_____no_output_____m = regex.match('[email protected]')_____no_output_____m.groups()_____no_output_____regex.findall(text)_____no_output_____print(regex.sub(r'Username: \1, Domain: \2, Suffix: \3',text))
Dave Username: dave, Domain: google, Suffix: com
Steve Username: steve, Domain: gmail, Suffix: com
Rob Username: rob, Domain: gmail, Suffix: com
Ryan Username: ryan, Domain: yahoo, Suffix: com
data = {'Dave': '[email protected]', 'Steve': '[email protected]',
.....: 'Rob': '[email protected]', 'Wes': np.nan}_____no_output_____data = pd.Series(data)_____no_output_____data_____no_output_____data.isnull()_____no_output_____data.str.contains('gmail')_____no_output_____pattern_____no_output_____data.str.findall(pattern, flags=re.IGNORECASE)_____no_output_____matches = data.str.match(pattern, flags=re.IGNORECASE)_____no_output_____matches_____no_output_____matches.str.get(1)_____no_output_____matches.str[0]_____no_output_____data.str[:5]_____no_output_____
</code>
| {
"repository": "Avriliar/data_analysis",
"path": "charpter7_erercise.ipynb",
"matched_keywords": [
"Salmon"
],
"stars": null,
"size": 122416,
"hexsha": "d0d57e97b08b94e36c32d9528d89c220f6cf524e",
"max_line_length": 356,
"avg_line_length": 23.7562584902,
"alphanum_fraction": 0.3531891256
} |
# Notebook from longr/biobb_wf_cwl_tutorial
Path: biobb_wf_cwl_tutorial/notebooks/biobb_CWL_tutorial.ipynb
# Common Workflow Language with BioExcel Building Blocks
### Based on the Protein MD Setup tutorial using BioExcel Building Blocks (biobb)
***
This tutorial aims to illustrate the process of **building up a CWL workflow** using the **BioExcel Building Blocks library (biobb)**. The tutorial is based on the **Protein Gromacs MD Setup** [Jupyter Notebook tutorial](https://github.com/bioexcel/biobb_wf_md_setup).
***
**Biobb modules** used:
- [biobb_io](https://github.com/bioexcel/biobb_io): Tools to fetch biomolecular data from public databases.
- [biobb_model](https://github.com/bioexcel/biobb_model): Tools to model macromolecular structures.
- [biobb_md](https://github.com/bioexcel/biobb_md): Tools to setup and run Molecular Dynamics simulations.
- [biobb_analysis](https://github.com/bioexcel/biobb_analysis): Tools to analyse Molecular Dynamics trajectories.
**Software requirements**:
- [cwltool](https://github.com/common-workflow-language/cwltool): Common Workflow Language tool description reference implementation.
- [docker](https://www.docker.com/): Docker container platform.
***
### Tutorial Sections:
1. [CWL workflows: Brief Introduction](#intro)
2. [BioExcel building blocks TOOLS CWL Descriptions](#tools)
* [Tool Building Block CWL Sections](#toolcwl)
* [Complete Pdb Building Block CWL description](#pdbcwl)
3. [BioExcel building blocks WORKFLOWS CWL Descriptions](#workflows)
* [Header](#cwlheader)
* [Inputs](#inputs)
* [Outputs](#outputs)
* [Steps](#steps)
* [Input of a Run](#run)
* [Complete Workflow](#wf)
* [Running the CWL workflow](#runwf)
* [Cwltool workflow output](#wfoutput)
4. [Protein MD-Setup CWL workflow with BioExcel building blocks](#mdsetup)
* [Steps](#mdsteps)
* [Inputs](#mdinputs)
* [Outputs](#mdoutputs)
* [Complete Workflow](#mdworkflow)
* [Input of a Run](#mdrun)
* [Running the CWL workflow](#mdcwlrun)
5. [Questions & Comments](#questions)_____no_output_____***
<img src="logo.png" />
***_____no_output_____<a id="intro"></a>
## CWL workflows: Brief Introduction
The **Common Workflow Language (CWL)** is an open standard for describing analysis **workflows and tools** in a way that makes them **portable and scalable** across a variety of software and hardware environments, from workstations to cluster, cloud, and high performance computing (HPC) environments.
**CWL** is a community-led specification to express **portable workflow and tool descriptions**, which can be executed by **multiple leading workflow engine implementations**. Unlike previous standardisation attempts, CWL has taken a pragmatic approach and focused on what most workflow systems are able to do: Execute command line tools and pass files around in a top-to-bottom pipeline. At the heart of CWL workflows are the **tool descriptions**. A command line is described, with parameters, input and output files, in a **YAML format** so they can be shared across workflows and linked to from registries like **ELIXIR’s bio.tools**. These are then combined and wired together in a **second YAML file** to form a workflow template, which can be **executed on any of the supported implementations**, repeatedly and **on different platforms** by specifying input files and workflow parameters. The [CWL User Guide](https://www.commonwl.org/user_guide/index.html) gives a gentle introduction to the language, while the more detailed [CWL specifications](https://www.commonwl.org/v1.1/) formalize CWL concepts so they can be implemented by the different workflow systems. A couple of **BioExcel webinars** were focused on **CWL**, an [introduction to CWL](https://www.youtube.com/watch?v=jfQb1HJWRac) and a [new open source tool to run CWL workflows on LSF (CWLEXEC)](https://www.youtube.com/watch?v=_jSTZMWtPAY).
**BioExcel building blocks** are all **described in CWL**. A specific **CWL** section in the **workflow manager adapters** [github repository](https://github.com/bioexcel/biobb_adapters/tree/master/biobb_adapters/cwl) gathers all the descriptions, divided in the different categories: io, md, analysis, chemistry, model and pmx (see updated table [here](http://mmb.irbbarcelona.org/webdev/slim/biobb/public/availability/source)).
In this tutorial, we are going to use these **BioExcel building blocks CWL descriptions** to build a **CWL** biomolecular workflow. In particular, the assembled workflow will perform a complete **Molecular Dynamics setup** (MD Setup) using **GROMACS MD package**, taking as a base the **Protein Gromacs MD Setup** [Jupyter Notebook tutorial](https://github.com/bioexcel/biobb_wf_md_setup).
No additional installation is required apart from the **Docker platform** and the **CWL tool reference executor**, as the **building blocks** will be launched using their associated **Docker containers**. _____no_output_____***
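As a minimal setup sketch (assuming a Python environment with pip and a working Docker installation), the commands below install and check these two requirements; the exact install method (pip, conda, system packages) may differ on your system._____no_output_____
<code>
# Setup sketch (assumption: pip-based install; conda is also possible)
pip install cwltool     # CWL reference implementation (executor)
cwltool --version       # check the CWL executor is available
docker --version        # check the Docker platform is available_____no_output_____
</code>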
<a id="tools"></a>
## BioExcel building blocks TOOLS CWL Descriptions
Writing a workflow in CWL using the **BioExcel building blocks** is possible thanks to the already generated **CWL descriptions** for all the **building blocks** (wrappers). A specific **CWL** section in the **workflow manager adapters** [github repository](https://github.com/bioexcel/biobb_adapters/tree/master/biobb_adapters/cwl) gathers all the descriptions, divided in the different categories: io, md, analysis, chemistry, model and pmx (see updated table [here](http://mmb.irbbarcelona.org/webdev/slim/biobb/public/availability/source)).
***
<a id="toolcwl"></a>
### Tool Building Block CWL sections:
**Example**: Step 1 of the workflow, download a **protein structure** from the **PDB database**. The building block used for this is the [Pdb](https://github.com/bioexcel/biobb_io/blob/master/biobb_io/api/pdb.py) building block, from the [biobb_io](https://github.com/bioexcel/biobb_io) package, including tools to **fetch biomolecular data from public databases**. The **CWL description** for this building block can be found in the [adapters github repo](https://github.com/bioexcel/biobb_adapters/blob/master/biobb_adapters/cwl/biobb_io/mmb_api/pdb.cwl), and is shown in the following notebook cell. Description files like this one for all the steps of the workflow are needed to build and run a **CLW workflow**. To build a **CWL workflow** with **BioExcel building blocks**, one just need to download all the needed description files from the [biobb_adapters github](https://github.com/bioexcel/biobb_adapters/blob/master/biobb_adapters/cwl).
This particular example of a **Pdb building block** is useful to illustrate the most important points of the **CWL description**:
* **hints**: The **CWL hints** section describes the **process requirements** that should (but do not have to) be satisfied to run the wrapped command. The implementation may report a **warning** if a hint cannot be satisfied. In the **BioExcel building blocks**, a **DockerRequirement** subsection is always present in the **hints** section, pointing to the associated **Docker container**. The **dockerPull: parameter** takes the same value that you would pass to a **docker pull** command. That is, the name of the **container image**. In this case we have used the container called **biobb_io:latest** that can be found in the **quay.io repository**, which contains the **Pdb** building block._____no_output_____
<code>
hints:
DockerRequirement:
dockerPull: quay.io/biocontainers/biobb_io:latest_____no_output_____
</code>
* **namespaces and schemas**: Input and output **metadata** may be represented within a tool or workflow. Such **metadata** must use a **namespace prefix** listed in the **$namespaces and $schemas sections** of the document. All **BioExcel building blocks CWL specifications** use the **EDAM ontology** (http://edamontology.org/) as **namespace**, with all terms included in its **Web Ontology Language** (owl) of knowledge representation (http://edamontology.org/EDAM_1.22.owl). **BioExcel** is contributing to the expansion of the **EDAM ontology** with the addition of new structural terms such as [GROMACS XTC format](http://edamontology.org/format_3875) or the [trajectory visualization operation](http://edamontology.org/operation_3890)._____no_output_____
<code>
$namespaces:
edam: http://edamontology.org/
$schemas:
- http://edamontology.org/EDAM_1.22.owl_____no_output_____
</code>
* **inputs**: The **inputs section** of a **tool** contains a list of input parameters that **control how to run the tool**. Each parameter has an **id** for the name of parameter, and **type** describing what types of values are valid for that parameter. Available primitive types are *string, int, long, float, double, and null*; complex types are *array and record*; in addition there are special types *File, Directory and Any*. The field **inputBinding** is optional and indicates whether and how the input parameter should appear on the tool’s command line, in which **position** (position), and with which **name** (prefix). The **default field** stores the **default value** for the particular **input parameter**. <br>In this particular example, the **Pdb building block** has two different **input parameters**: *output_pdb_path* and *config*. The *output_pdb_path* input parameter defines the name of the **output file** that will contain the downloaded **PDB structure**. The *config* parameter is common to all **BioExcel building blocks**, and gathers all the **properties** of the building block in a **json format**. The **question mark** after the string type (*string?*) denotes that this input is **optional**. _____no_output_____
<code>
inputs:
output_pdb_path:
type: string
inputBinding:
position: 1
prefix: --output_pdb_path
default: 'downloaded_structure.pdb'
config:
type: string?
inputBinding:
position: 2
prefix: --config
default: '{"pdb_code" : "1aki"}'_____no_output_____
</code>
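Putting these input descriptions together with the wrapped *pdb* command, the command line assembled by the CWL runner from the **position** and **prefix** fields and the **default values** above would look roughly like the sketch below (the runner handles the exact quoting and argument ordering):_____no_output_____
<code>
# Approximate command line built from the inputBinding defaults shown above
pdb --output_pdb_path downloaded_structure.pdb --config '{"pdb_code" : "1aki"}'_____no_output_____
</code>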
* **outputs**: The **outputs section** of a **tool** contains a list of output parameters that should be returned after running the **tool**. Similarly to the inputs section, each parameter has an **id** for the name of parameter, and **type** describing what types of values are valid for that parameter. The **outputBinding** field describes how to set the value of each output parameter. The **glob field** consists of the name of a file in the **output directory**. In the **BioExcel building blocks**, every **output** has an associated **input parameter** defined in the previous input section, defining the name of the file to be generated. <br>In the particular **Pdb building block** example, the *output_pdb_file* parameter of type *File* is coupled to the *output_pdb_path* input parameter, using the **outputBinding** and the **glob** fields. The standard **PDB** format of the output file is also specified using the **EDAM ontology** format id 1476 ([edam:format_1476](http://edamontology.org/format_1476)). _____no_output_____
<code>
outputs:
output_pdb_file:
type: File
format: edam:format_1476
outputBinding:
glob: $(inputs.output_pdb_path)_____no_output_____
</code>
For more information on CWL tools description, please refer to the [CWL User Guide](https://www.commonwl.org/user_guide/index.html) or the [CWL specifications](https://www.commonwl.org/v1.1/).
***
<a id="pdbcwl"></a>
### Complete Pdb Building Block CWL description:
Example of a **BioExcel building block CWL description** (pdb from biobb_io package)_____no_output_____
<code>
# Example of a BioExcel building block CWL description (pdb from biobb_io package)
#!/usr/bin/env cwl-runner
cwlVersion: v1.0
class: CommandLineTool
baseCommand: pdb
hints:
DockerRequirement:
dockerPull: quay.io/biocontainers/biobb_io:latest
inputs:
output_pdb_path:
type: string
inputBinding:
position: 1
prefix: --output_pdb_path
default: 'downloaded_structure.pdb'
config:
type: string?
inputBinding:
position: 2
prefix: --config
default: '{"pdb_code" : "1aki"}'
outputs:
output_pdb_file:
type: File
format: edam:format_1476
outputBinding:
glob: $(inputs.output_pdb_path)
$namespaces:
edam: http://edamontology.org/
$schemas:
- http://edamontology.org/EDAM_1.22.owl_____no_output_____
</code>
***
<a id="workflows"></a>
## BioExcel building blocks WORKFLOWS CWL Descriptions
Now that we have seen the **BioExcel building blocks CWL descriptions**, we can use them to build our first **biomolecular workflow** as a demonstrator. All **CWL workflows** are divided in **two files**: the **CWL description** and the **YAML** or **JSON** files containing **all workflow inputs**. Starting with the **CWL workflow description**, let's explore our first example **section by section**.
<a id="cwlheader"></a>
### Header:
* **cwlVersion** field indicates the version of the **CWL spec** used by the document.
* **class** field indicates this document describes a **workflow**._____no_output_____
<code>
#!/usr/bin/env cwl-runner
cwlVersion: v1.0
class: Workflow
label: Example CWL Header
doc: |
    An example of how to create a CWL header. We have specified the version
    of CWL that we are using; the class, which is a 'workflow'. The label
    field should provide a short title or description of the workflow and
    the description should provide a longer description of what the workflow
    does._____no_output_____
</code>
<a id="inputs"></a>
### Inputs:
The **inputs section** describes the inputs for **each of the steps** of the workflow. The **BioExcel building blocks (biobb)** have three types of **input parameters**: **input**, **output**, and **properties**. The **properties** parameter, which contains all the input parameters that are neither **input** nor **output files**, is defined in **JSON format** (see examples in the **Protein MD Setup** [Jupyter Notebook tutorial](https://github.com/bioexcel/biobb_wf_md_setup)).
**Example**: Step 1 of the workflow, download a **protein structure** from the **PDB database**. Two different **inputs** are needed for this step: the **name of the file** that will contain the downloaded PDB structure (*step1_output_name*), and the **properties** of the building block (*step1_properties*), that in this case will indicate the PDB code to look for (see **Input of a run** section). Both input parameters have type *string* in this **building block**. _____no_output_____
<code>
# CWL workflow inputs section example
inputs:
step1_output_name: string
step1_properties: string_____no_output_____
</code>
<a id="outputs"></a>
### Outputs:
The **outputs:** section describes the set of **final outputs** from the **workflow**. These outputs can be a collection of outputs from **different steps of the workflow**. Each output is a `key: value` pair. The `key` should be a unique identifier, and the value should be a dictionary (consisting of `key: value` pairs). These `keys` consist of `label`, which is a title or name for the output; `doc`, which is a longer description of what this output is; `type`, which is the data type expected; and `outputSource`, which connects the output parameter of a **particular step** to the **workflow final output parameter**._____no_output_____
<code>
outputs:
pdb: #unique identifier
label: Protein structure
doc: |
Step 1 of the workflow, download a 'protein structure' from the
'PDB database'. The *pdb* 'output' is a 'file' containing the
'protein structure' in 'PDB format', which is connected to the
output parameter *output_pdb_file* of the 'step1 of the workflow'
(*step1_pdb*).
type: File #data type
outputSource: step1_pdb/output_pdb_file_____no_output_____
</code>
<a id="steps"></a>
### Steps:
The **steps section** describes the actual steps of the workflow. Steps are **connected** one to the other through the **input parameters**.
**Workflow steps** are not necessarily run in the order they are listed, instead **the order is determined by the dependencies between steps**. In addition, workflow steps which do not depend on one another may run **in parallel**.
**Example**: Step 1 and 2 of the workflow, download a **protein structure** from the **PDB database**, and **fix the side chains**, adding any side chain atoms missing in the original structure. Note how **step1 and step2** are **connected** through the **output** of one and the **input** of the other: **Step2** (*step2_fixsidechain*) receives as **input** (*input_pdb_path*) the **output of the step1** (*step1_pdb*), identified as *step1_pdb/output_pdb_file*._____no_output_____
<code>
# CWL workflow steps section example
step1_pdb:
label: Fetch PDB Structure
doc: |
Download a protein structure from the PDB database
run: biobb/biobb_adapters/cwl/biobb_io/mmb_api/pdb.cwl
in:
output_pdb_path: step1_pdb_name
config: step1_pdb_config
out: [output_pdb_file]
step2_fixsidechain:
label: Fix Protein structure
doc: |
Fix the side chains, adding any side chain atoms missing in the
original structure.
run: biobb/biobb_adapters/cwl/biobb_model/model/fix_side_chain.cwl
in:
input_pdb_path: step1_pdb/output_pdb_file
out: [output_pdb_file]_____no_output_____
</code>
<a id="run"></a>
### Input of a run:
As previously stated, all **CWL workflows** are divided in **two files**: the **CWL description** and the **YAML** or **JSON** files containing **all workflow inputs**. In this example, we are going to produce a **YAML** formatted object in a separate file describing the **inputs of our run**.
**Example**: Step 1 of the workflow, download a **protein structure** from the **PDB database**. The **step1_output_name** contains the name of the file that is going to be produced by the **building block**, whereas the **JSON-formatted properties** (**step1_properties**) contain the **pdb code** of the structure to be downloaded:
* step1_output_name: **"tutorial_1aki.pdb"**
* step1_properties: **{"pdb_code" : "1aki"}**_____no_output_____
<code>
step1_output_name: 'tutorial_1aki.pdb'
step1_properties: '{"pdb_code" : "1aki"}'_____no_output_____
</code>
<a id="wf"></a>
### Complete workflow:
Example of a short **CWL workflow** with **BioExcel building blocks**, which retrieves a **PDB file** for the **Lysozyme protein structure** from the RCSB PDB database (**step1: pdb.cwl**), and fixes the possible problems in the structure, adding **missing side chain atoms** if needed (**step2: fix_side_chain.cwl**). _____no_output_____
<code>
#!/usr/bin/env cwl-runner
cwlVersion: v1.0
class: Workflow
label: Example of a short CWL workflow with BioExcel building blocks
doc: |
Example of a short 'CWL workflow' with 'BioExcel building blocks', which
retrieves a 'PDB file' for the 'Lysozyme protein structure' from the RCSB PDB
database ('step1: pdb.cwl'), and fixes the possible problems in the structure,
adding 'missing side chain atoms' if needed ('step2: fix_side_chain.cwl').
inputs:
  step1_properties: string
  step1_output_name: string
outputs:
pdb:
type: File
outputSource: step2_fixsidechain/output_pdb_file
steps:
step1_pdb:
label: Fetch PDB Structure
doc: |
Download a protein structure from the PDB database
run: biobb_adapters/pdb.cwl
in:
output_pdb_path: step1_output_name
config: step1_properties
out: [output_pdb_file]
step2_fixsidechain:
label: Fix Protein structure
doc: |
Fix the side chains, adding any side chain atoms missing in the
original structure.
run: biobb_adapters/fix_side_chain.cwl
in:
input_pdb_path: step1_pdb/output_pdb_file
out: [output_pdb_file]_____no_output_____
</code>
<a id="runwf"></a>
### Running the CWL workflow:
The final step of the process is **running the workflow described in CWL**. For that, the description presented in the previous cell should be written to a file (e.g. BioExcel-CWL-firstWorkflow.cwl), the **YAML** input should be written to a separate file (e.g. BioExcel-CWL-firstWorkflow-job.yml) and finally both files should be used with the **CWL tool description reference implementation executor** (cwltool).
It is important to note that in order to properly run the **CWL workflow**, the **CWL descriptions** for all the **building blocks** used in the **workflow** should be accessible from the file system. In this example, all the **CWL descriptions** needed were downloaded from the [BioExcel building blocks adapters github repository](https://github.com/bioexcel/biobb_adapters/tree/master/biobb_adapters/cwl) to a folder named **biobb_adapters**.
The **command line** is shown in the cell below:_____no_output_____
<code>
# Run CWL workflow with CWL tool description reference implementation (cwltool).
cwltool BioExcel-CWL-firstWorkflow.cwl BioExcel-CWL-firstWorkflow-job.yml_____no_output_____
</code>
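Before launching the run, the workflow description can optionally be checked and a skeleton inputs file generated. The commands below are a sketch using two standard cwltool options; the --make-template option is available in recent cwltool versions._____no_output_____
<code>
# Optional pre-run checks (sketch)
cwltool --validate BioExcel-CWL-firstWorkflow.cwl        # validate the CWL description
cwltool --make-template BioExcel-CWL-firstWorkflow.cwl   # print a skeleton YAML inputs file_____no_output_____
</code>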
<a id="wfoutput"></a>
### Cwltool workflow output
The **execution of the workflow** will write information to the standard output such as the **step being performed**, the **way it is run** (command line, docker container, etc.), **inputs and outputs** used, and **state of each step** (success, failed). The next cell contains a **real output** for the **execution of our first example**:_____no_output_____
<code>
Resolved 'BioExcel-CWL-firstWorkflow.cwl' to 'file:///PATH/biobb_wf_md_setup/cwl/BioExcel-CWL-firstWorkflow.cwl'
[workflow BioExcel-CWL-firstWorkflow.cwl] start
[step step1_pdb] start
[job step1_pdb] /private/tmp/docker_tmp1g8y0wu0$ docker \
run \
-i \
--volume=/private/tmp/docker_tmp1g8y0wu0:/private/var/spool/cwl:rw \
--volume=/private/var/folders/7f/0hxgf3d971b98lk_fps26jx40000gn/T/tmps4_pw5tj:/tmp:rw \
--workdir=/private/var/spool/cwl \
--read-only=true \
--user=501:20 \
--rm \
--env=TMPDIR=/tmp \
--env=HOME=/private/var/spool/cwl \
quay.io/biocontainers/biobb_io:0.1.3--py_0 \
pdb \
--config \
'{"pdb_code" : "1aki"}' \
--output_pdb_path \
tutorial.pdb
2019-10-24 08:42:06,235 [MainThread ] [INFO ] Downloading: 1aki from: https://files.rcsb.org/download/1aki.pdb
2019-10-24 08:42:07,594 [MainThread ] [INFO ] Writting pdb to: /private/var/spool/cwl/tutorial.pdb
2019-10-24 08:42:07,607 [MainThread ] [INFO ] Filtering lines NOT starting with one of these words: ['ATOM', 'MODEL', 'ENDMDL']
[job step1_pdb] completed success
[step step1_pdb] completed success
[step step2_fixsidechain] start
[job step2_fixsidechain] /private/tmp/docker_tmpuaecttdd$ docker \
run \
-i \
--volume=/private/tmp/docker_tmpuaecttdd:/private/var/spool/cwl:rw \
--volume=/private/var/folders/7f/0hxgf3d971b98lk_fps26jx40000gn/T/tmp9t_nks8r:/tmp:rw \
--volume=/private/tmp/docker_tmp1g8y0wu0/tutorial.pdb:/private/var/lib/cwl/stg5b2950e7-ef54-4df6-be70-677050c4c258/tutorial.pdb:ro \
--workdir=/private/var/spool/cwl \
--read-only=true \
--user=501:20 \
--rm \
--env=TMPDIR=/tmp \
--env=HOME=/private/var/spool/cwl \
quay.io/biocontainers/biobb_model:0.1.3--py_0 \
fix_side_chain \
--input_pdb_path \
/private/var/lib/cwl/stg5b2950e7-ef54-4df6-be70-677050c4c258/tutorial.pdb \
--output_pdb_path \
fixed.pdb
[job step2_fixsidechain] completed success
[step step2_fixsidechain] completed success
[workflow BioExcel-CWL-firstWorkflow.cwl] completed success
{
"pdb": {
"location": "file:///PATH/biobb_wf_md_setup/cwl/fixed.pdb",
"basename": "fixed.pdb",
"class": "File",
"checksum": "sha1$3ef7a955f93f25af5e59b85bcf4cb1d0bbf69a40",
"size": 81167,
"format": "http://edamontology.org/format_1476",
"path": "/PATH/biobb_wf_md_setup/cwl/fixed.pdb"
}
}
Final process status is success_____no_output_____
</code>
***
<a id="mdsetup"></a>
## Protein MD-Setup CWL workflow with BioExcel building blocks
The last step of this **tutorial** illustrates the building of a **complex CWL workflow**. The example used is the **Protein Gromacs MD Setup** [Jupyter Notebook tutorial](https://github.com/bioexcel/biobb_wf_md_setup). It is strongly recommended to take a look at this **notebook** before moving on to the next sections of this **tutorial**, as it contains information for all the **building blocks** used. The aim of this **tutorial** is to illustrate how to build **CWL workflows** using the **BioExcel building blocks**. For information about the science behind every step of the workflow, please refer to the **Protein Gromacs MD Setup** Jupyter Notebook tutorial. The **workflow** presented in the next cells is a translation of the very same workflow to **CWL language**, including the same **number of steps** (23) and **building blocks**.
<a id="mdsteps"></a>
### Steps:
First of all, let's define the **steps of the workflow**.
* **Fetching PDB Structure**: step 1
* **Fix Protein Structure**: step 2
* **Create Protein System Topology**: step 3
* **Create Solvent Box**: step 4
* **Fill the Box with Water Molecules**: step 5
* **Adding Ions**: steps 6 and 7
* **Energetically Minimize the System**: steps 8, 9 and 10
* **Equilibrate the System (NVT)**: steps 11, 12 and 13
* **Equilibrate the System (NPT)**: steps 14, 15 and 16
* **Free Molecular Dynamics Simulation**: steps 17 and 18
* **Post-processing Resulting 3D Trajectory**: steps 19 to 23
Mandatory and optional **inputs** and **outputs** of every **building block** can be consulted in the appropriate **documentation** pages from the corresponding **BioExcel building block** category (see updated table [here](http://mmb.irbbarcelona.org/webdev/slim/biobb/public/availability/source)). _____no_output_____
<code>
step1_pdb:
label: Fetch PDB Structure
doc: |
Download a protein structure from the PDB database
run: biobb/biobb_adapters/cwl/biobb_io/mmb_api/pdb.cwl
in:
output_pdb_path: step1_pdb_name
config: step1_pdb_config
out: [output_pdb_file]
step2_fixsidechain:
label: Fix Protein structure
doc: |
Fix the side chains, adding any side chain atoms missing in the
original structure.
run: biobb/biobb_adapters/cwl/biobb_model/model/fix_side_chain.cwl
in:
input_pdb_path: step1_pdb/output_pdb_file
out: [output_pdb_file]
step3_pdb2gmx:
label: Create Protein System Topology
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/pdb2gmx.cwl
in:
input_pdb_path: step2_fixsidechain/output_pdb_file
out: [output_gro_file, output_top_zip_file]
step4_editconf:
label: Create Solvent Box
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/editconf.cwl
in:
input_gro_path: step3_pdb2gmx/output_gro_file
out: [output_gro_file]
step5_solvate:
label: Fill the Box with Water Molecules
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/solvate.cwl
in:
input_solute_gro_path: step4_editconf/output_gro_file
input_top_zip_path: step3_pdb2gmx/output_top_zip_file
out: [output_gro_file, output_top_zip_file]
step6_grompp_genion:
label: Add Ions - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step6_gppion_config
input_gro_path: step5_solvate/output_gro_file
input_top_zip_path: step5_solvate/output_top_zip_file
out: [output_tpr_file]
step7_genion:
label: Add Ions - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/genion.cwl
in:
config: step7_genion_config
input_tpr_path: step6_grompp_genion/output_tpr_file
input_top_zip_path: step5_solvate/output_top_zip_file
out: [output_gro_file, output_top_zip_file]
step8_grompp_min:
label: Energetically Minimize the System - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step8_gppmin_config
input_gro_path: step7_genion/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
out: [output_tpr_file]
step9_mdrun_min:
label: Energetically Minimize the System - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step8_grompp_min/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file]
step10_energy_min:
label: Energetically Minimize the System - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_energy.cwl
in:
config: step10_energy_min_config
output_xvg_path: step10_energy_min_name
input_energy_path: step9_mdrun_min/output_edr_file
out: [output_xvg_file]
step11_grompp_nvt:
label: Equilibrate the System (NVT) - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step11_gppnvt_config
input_gro_path: step9_mdrun_min/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
out: [output_tpr_file]
step12_mdrun_nvt:
label: Equilibrate the System (NVT) - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step11_grompp_nvt/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file, output_cpt_file]
step13_energy_nvt:
label: Equilibrate the System (NVT) - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_energy.cwl
in:
config: step13_energy_nvt_config
output_xvg_path: step13_energy_nvt_name
input_energy_path: step12_mdrun_nvt/output_edr_file
out: [output_xvg_file]
step14_grompp_npt:
label: Equilibrate the System (NPT) - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step14_gppnpt_config
input_gro_path: step12_mdrun_nvt/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
input_cpt_path: step12_mdrun_nvt/output_cpt_file
out: [output_tpr_file]
step15_mdrun_npt:
label: Equilibrate the System (NPT) - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step14_grompp_npt/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file, output_cpt_file]
step16_energy_npt:
label: Equilibrate the System (NPT) - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_energy.cwl
in:
config: step16_energy_npt_config
output_xvg_path: step16_energy_npt_name
input_energy_path: step15_mdrun_npt/output_edr_file
out: [output_xvg_file]
step17_grompp_md:
label: Free Molecular Dynamics Simulation - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step17_gppmd_config
input_gro_path: step15_mdrun_npt/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
input_cpt_path: step15_mdrun_npt/output_cpt_file
out: [output_tpr_file]
step18_mdrun_md:
label: Free Molecular Dynamics Simulation - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step17_grompp_md/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file, output_cpt_file]
step19_rmsfirst:
label: Post-processing Resulting 3D Trajectory - part 1
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_rms.cwl
in:
config: step19_rmsfirst_config
output_xvg_path: step19_rmsfirst_name
input_structure_path: step17_grompp_md/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_xvg_file]
step20_rmsexp:
label: Post-processing Resulting 3D Trajectory - part 2
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_rms.cwl
in:
config: step20_rmsexp_config
output_xvg_path: step20_rmsexp_name
input_structure_path: step8_grompp_min/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_xvg_file]
step21_rgyr:
label: Post-processing Resulting 3D Trajectory - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_rgyr.cwl
in:
config: step21_rgyr_config
input_structure_path: step8_grompp_min/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_xvg_file]
step22_image:
label: Post-processing Resulting 3D Trajectory - part 4
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_image.cwl
in:
config: step22_image_config
input_top_path: step17_grompp_md/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_traj_file]
step23_dry:
label: Post-processing Resulting 3D Trajectory - part 5
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_trjconv_str.cwl
in:
config: step23_dry_config
input_structure_path: step18_mdrun_md/output_gro_file
input_top_path: step17_grompp_md/output_tpr_file
out: [output_str_file]_____no_output_____
</code>
<a id="mdinputs"></a>
### Inputs:
All inputs for the **BioExcel building blocks** are defined as *strings*. Not all the steps in this particular example need **external inputs**; some of them just work using as input an output (or outputs) from **previous steps** (e.g. step2_fixsidechain). The steps that do need input will receive a **JSON** formatted input (of type string), with the **properties parameters** of the **building blocks** (config). Apart from that, some of the **building blocks** in this example receive two different input parameters: the **properties** (e.g. *step1_pdb_config*) and the **name of the output file** to be written (e.g. *step1_pdb_name*). This is particularly useful to identify the files generated by different steps of the **workflow**. Besides, in cases where the same **building block** is used more than once, using the **default value** for the **output files** would cause the **overwriting** of the results generated by previous steps (e.g. energy calculation steps).
All these inputs will be filled up with values from the **separated YAML input file**. _____no_output_____
<code>
inputs:
step1_pdb_name: string
step1_pdb_config: string
step4_editconf_config: string
step6_gppion_config: string
step7_genion_config: string
step8_gppmin_config: string
step10_energy_min_config: string
step10_energy_min_name: string
step11_gppnvt_config: string
step13_energy_nvt_config: string
step13_energy_nvt_name: string
step14_gppnpt_config: string
step16_energy_npt_config: string
step16_energy_npt_name: string
step17_gppmd_config: string
step19_rmsfirst_config: string
step19_rmsfirst_name: string
step20_rmsexp_config: string
step20_rmsexp_name: string
step21_rgyr_config: string
step22_image_config: string
step23_dry_config: string_____no_output_____
</code>
<a id="mdoutputs"></a>
### Outputs:
The **outputs section** contains the set of **final outputs** from the **workflow**. In this case, **outputs** from **different steps** of the **workflow** are considered **final outputs**:
* **Trajectories**:
* **trr**: Raw trajectory from the *free* simulation step.
* **trr_imaged_dry**: Post-processed trajectory, dehydrated, imaged (rotations and translations removed) and centered.
* **Structures**:
* **gro**: Raw structure from the *free* simulation step.
* **gro_dry**: Resulting protein structure taken from the post-processed trajectory, to be used as a topology, usually for visualization purposes.
* **Topologies**:
* **tpr**: GROMACS portable binary run input file, containing the starting structure of the simulation, the molecular topology and all the simulation parameters.
* **top**: GROMACS topology file, containing the molecular topology in an ASCII readable format.
* **System Setup Observables**:
* **xvg_min**: Potential energy of the system during the minimization step.
* **xvg_nvt**: Temperature of the system during the NVT equilibration step.
* **xvg_npt**: Pressure and density of the system (box) during the NPT equilibration step.
* **Simulation Analysis**:
* **xvg_rmsfirst**: Root Mean Square deviation (RMSd) throughout the whole *free* simulation step against the first snapshot of the trajectory (equilibrated system).
* **xvg_rmsexp**: Root Mean Square deviation (RMSd) throughout the whole *free* simulation step against the experimental structure (minimized system).
* **xvg_rgyr**: Radius of Gyration (RGyr) of the molecule throughout the whole *free* simulation step.
* **Checkpoint file**:
* **cpt**: GROMACS portable checkpoint file, allowing to restore (continue) the simulation from the last step of the setup process.
Please note that the name of the **output files** is sometimes fixed by a **specific input** (e.g. step10_energy_min_name), whereas when no specific name is given as input, the **default value** is used (e.g. system.tpr). **Default values** can be found in the **CWL description** files for each **building block** (biobb_adapters). _____no_output_____
<code>
outputs:
trr:
label: Trajectories - Raw trajectory
doc: |
Raw trajectory from the free simulation step
type: File
outputSource: step18_mdrun_md/output_trr_file
trr_imaged_dry:
label: Trajectories - Post-processed trajectory
doc: |
Post-processed trajectory, dehydrated, imaged (rotations and translations
removed) and centered.
type: File
outputSource: step22_image/output_traj_file
gro_dry:
label: Resulting protein structure
doc: |
Resulting protein structure taken from the post-processed trajectory, to
be used as a topology, usually for visualization purposes.
type: File
outputSource: step23_dry/output_str_file
gro:
label: Structures - Raw structure
doc: |
Raw structure from the free simulation step.
type: File
outputSource: step18_mdrun_md/output_gro_file
cpt:
label: Checkpoint file
doc: |
GROMACS portable checkpoint file, allowing to restore (continue) the
simulation from the last step of the setup process.
type: File
outputSource: step18_mdrun_md/output_cpt_file
tpr:
label: Topologies GROMACS portable binary run
doc: |
GROMACS portable binary run input file, containing the starting structure
of the simulation, the molecular topology and all the simulation parameters.
type: File
outputSource: step17_grompp_md/output_tpr_file
top:
label: GROMACS topology file
doc: |
GROMACS topology file, containing the molecular topology in an ASCII
readable format.
type: File
outputSource: step7_genion/output_top_zip_file
xvg_min:
label: System Setup Observables - Potential Energy
doc: |
Potential energy of the system during the minimization step.
type: File
outputSource: step10_energy_min/output_xvg_file
xvg_nvt:
label: System Setup Observables - Temperature
doc: |
Temperature of the system during the NVT equilibration step.
type: File
outputSource: step13_energy_nvt/output_xvg_file
xvg_npt:
label: System Setup Observables - Pressure and density
type: File
outputSource: step16_energy_npt/output_xvg_file
xvg_rmsfirst:
label: Simulation Analysis
doc: |
Root Mean Square deviation (RMSd) throughout the whole free simulation
step against the first snapshot of the trajectory (equilibrated system).
type: File
outputSource: step19_rmsfirst/output_xvg_file
xvg_rmsexp:
label: Simulation Analysis
doc: |
Root Mean Square deviation (RMSd) throughout the whole free simulation
step against the experimental structure (minimized system).
type: File
outputSource: step20_rmsexp/output_xvg_file
xvg_rgyr:
label: Simulation Analysis
doc: |
Radius of Gyration (RGyr) of the molecule throughout the whole free simulation step
type: File
outputSource: step21_rgyr/output_xvg_file_____no_output_____
</code>
<a id="mdworkflow"></a>
### Complete workflow:
The complete **CWL described workflow** to run a **Molecular Dynamics Setup** on a protein structure can be found in the next cell. The **representation of the workflow** using the **CWL Viewer** web service can be found here: XXXXXX. The **full workflow** is a combination of the **inputs**, **outputs** and **steps** revised in the previous cells. _____no_output_____
<code>
# Protein MD-Setup CWL workflow with BioExcel building blocks
# https://github.com/bioexcel/biobb_wf_md_setup
#!/usr/bin/env cwl-runner
cwlVersion: v1.0
class: Workflow
inputs:
step1_pdb_name: string
step1_pdb_config: string
step4_editconf_config: string
step6_gppion_config: string
step7_genion_config: string
step8_gppmin_config: string
step10_energy_min_config: string
step10_energy_min_name: string
step11_gppnvt_config: string
step13_energy_nvt_config: string
step13_energy_nvt_name: string
step14_gppnpt_config: string
step16_energy_npt_config: string
step16_energy_npt_name: string
step17_gppmd_config: string
step19_rmsfirst_config: string
step19_rmsfirst_name: string
step20_rmsexp_config: string
step20_rmsexp_name: string
step21_rgyr_config: string
step22_image_config: string
step23_dry_config: string
outputs:
trr:
label: Trajectories - Raw trajectory
doc: |
Raw trajectory from the free simulation step
type: File
outputSource: step18_mdrun_md/output_trr_file
trr_imaged_dry:
label: Trajectories - Post-processed trajectory
doc: |
Post-processed trajectory, dehydrated, imaged (rotations and translations
removed) and centered.
type: File
outputSource: step22_image/output_traj_file
gro_dry:
label: Resulting protein structure
doc: |
Resulting protein structure taken from the post-processed trajectory, to
be used as a topology, usually for visualization purposes.
type: File
outputSource: step23_dry/output_str_file
gro:
label: Structures - Raw structure
doc: |
Raw structure from the free simulation step.
type: File
outputSource: step18_mdrun_md/output_gro_file
cpt:
label: Checkpoint file
doc: |
GROMACS portable checkpoint file, allowing to restore (continue) the
simulation from the last step of the setup process.
type: File
outputSource: step18_mdrun_md/output_cpt_file
tpr:
label: Topologies GROMACS portable binary run
doc: |
GROMACS portable binary run input file, containing the starting structure
of the simulation, the molecular topology and all the simulation parameters.
type: File
outputSource: step17_grompp_md/output_tpr_file
top:
label: GROMACS topology file
doc: |
GROMACS topology file, containing the molecular topology in an ASCII
readable format.
type: File
outputSource: step7_genion/output_top_zip_file
xvg_min:
label: System Setup Observables - Potential Energy
doc: |
Potential energy of the system during the minimization step.
type: File
outputSource: step10_energy_min/output_xvg_file
xvg_nvt:
label: System Setup Observables - Temperature
doc: |
Temperature of the system during the NVT equilibration step.
type: File
outputSource: step13_energy_nvt/output_xvg_file
xvg_npt:
label: System Setup Observables - Pressure and density
type: File
outputSource: step16_energy_npt/output_xvg_file
xvg_rmsfirst:
label: Simulation Analysis
doc: |
Root Mean Square deviation (RMSd) throughout the whole free simulation
step against the first snapshot of the trajectory (equilibrated system).
type: File
outputSource: step19_rmsfirst/output_xvg_file
xvg_rmsexp:
label: Simulation Analysis
doc: |
Root Mean Square deviation (RMSd) throughout the whole free simulation
step against the experimental structure (minimized system).
type: File
outputSource: step20_rmsexp/output_xvg_file
xvg_rgyr:
label: Simulation Analysis
doc: |
Radius of Gyration (RGyr) of the molecule throughout the whole free simulation step
type: File
outputSource: step21_rgyr/output_xvg_file
steps:
step1_pdb:
label: Fetch PDB Structure
doc: |
Download a protein structure from the PDB database
run: biobb/biobb_adapters/cwl/biobb_io/mmb_api/pdb.cwl
in:
output_pdb_path: step1_pdb_name
config: step1_pdb_config
out: [output_pdb_file]
step2_fixsidechain:
label: Fix Protein structure
doc: |
Fix the side chains, adding any side chain atoms missing in the
original structure.
run: biobb/biobb_adapters/cwl/biobb_model/model/fix_side_chain.cwl
in:
input_pdb_path: step1_pdb/output_pdb_file
out: [output_pdb_file]
step3_pdb2gmx:
label: Create Protein System Topology
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/pdb2gmx.cwl
in:
input_pdb_path: step2_fixsidechain/output_pdb_file
out: [output_gro_file, output_top_zip_file]
step4_editconf:
label: Create Solvent Box
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/editconf.cwl
in:
input_gro_path: step3_pdb2gmx/output_gro_file
out: [output_gro_file]
step5_solvate:
label: Fill the Box with Water Molecules
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/solvate.cwl
in:
input_solute_gro_path: step4_editconf/output_gro_file
input_top_zip_path: step3_pdb2gmx/output_top_zip_file
out: [output_gro_file, output_top_zip_file]
step6_grompp_genion:
label: Add Ions - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step6_gppion_config
input_gro_path: step5_solvate/output_gro_file
input_top_zip_path: step5_solvate/output_top_zip_file
out: [output_tpr_file]
step7_genion:
label: Add Ions - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/genion.cwl
in:
config: step7_genion_config
input_tpr_path: step6_grompp_genion/output_tpr_file
input_top_zip_path: step5_solvate/output_top_zip_file
out: [output_gro_file, output_top_zip_file]
step8_grompp_min:
label: Energetically Minimize the System - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step8_gppmin_config
input_gro_path: step7_genion/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
out: [output_tpr_file]
step9_mdrun_min:
label: Energetically Minimize the System - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step8_grompp_min/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file]
step10_energy_min:
label: Energetically Minimize the System - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_energy.cwl
in:
config: step10_energy_min_config
output_xvg_path: step10_energy_min_name
input_energy_path: step9_mdrun_min/output_edr_file
out: [output_xvg_file]
step11_grompp_nvt:
label: Equilibrate the System (NVT) - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step11_gppnvt_config
input_gro_path: step9_mdrun_min/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
out: [output_tpr_file]
step12_mdrun_nvt:
label: Equilibrate the System (NVT) - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step11_grompp_nvt/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file, output_cpt_file]
step13_energy_nvt:
label: Equilibrate the System (NVT) - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_energy.cwl
in:
config: step13_energy_nvt_config
output_xvg_path: step13_energy_nvt_name
input_energy_path: step12_mdrun_nvt/output_edr_file
out: [output_xvg_file]
step14_grompp_npt:
label: Equilibrate the System (NPT) - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step14_gppnpt_config
input_gro_path: step12_mdrun_nvt/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
input_cpt_path: step12_mdrun_nvt/output_cpt_file
out: [output_tpr_file]
step15_mdrun_npt:
label: Equilibrate the System (NPT) - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step14_grompp_npt/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file, output_cpt_file]
step16_energy_npt:
label: Equilibrate the System (NPT) - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_energy.cwl
in:
config: step16_energy_npt_config
output_xvg_path: step16_energy_npt_name
input_energy_path: step15_mdrun_npt/output_edr_file
out: [output_xvg_file]
step17_grompp_md:
label: Free Molecular Dynamics Simulation - part 1
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/grompp.cwl
in:
config: step17_gppmd_config
input_gro_path: step15_mdrun_npt/output_gro_file
input_top_zip_path: step7_genion/output_top_zip_file
input_cpt_path: step15_mdrun_npt/output_cpt_file
out: [output_tpr_file]
step18_mdrun_md:
label: Free Molecular Dynamics Simulation - part 2
run: biobb/biobb_adapters/cwl/biobb_md/gromacs/mdrun.cwl
in:
input_tpr_path: step17_grompp_md/output_tpr_file
out: [output_trr_file, output_gro_file, output_edr_file, output_log_file, output_cpt_file]
step19_rmsfirst:
label: Post-processing Resulting 3D Trajectory - part 1
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_rms.cwl
in:
config: step19_rmsfirst_config
output_xvg_path: step19_rmsfirst_name
input_structure_path: step17_grompp_md/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_xvg_file]
step20_rmsexp:
label: Post-processing Resulting 3D Trajectory - part 2
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_rms.cwl
in:
config: step20_rmsexp_config
output_xvg_path: step20_rmsexp_name
input_structure_path: step8_grompp_min/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_xvg_file]
step21_rgyr:
label: Post-processing Resulting 3D Trajectory - part 3
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_rgyr.cwl
in:
config: step21_rgyr_config
input_structure_path: step8_grompp_min/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_xvg_file]
step22_image:
label: Post-processing Resulting 3D Trajectory - part 4
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_image.cwl
in:
config: step22_image_config
input_top_path: step17_grompp_md/output_tpr_file
input_traj_path: step18_mdrun_md/output_trr_file
out: [output_traj_file]
step23_dry:
label: Post-processing Resulting 3D Trajectory - part 5
run: biobb/biobb_adapters/cwl/biobb_analysis/gromacs/gmx_trjconv_str.cwl
in:
config: step23_dry_config
input_structure_path: step18_mdrun_md/output_gro_file
input_top_path: step17_grompp_md/output_tpr_file
out: [output_str_file]_____no_output_____
</code>
<a id="mdrun"></a>
### Input of the run:
As previously stated, all **CWL workflows** are divided in **two files**: the **CWL description** and the **YAML** or **JSON** files containing **all workflow inputs**. The following cell presents the **YAML** file describing the **inputs of the run** for the **Protein Gromacs MD Setup** workflow.
All the workflow inputs were defined as *strings* in the **CWL workflow**; **building block** inputs ending in "*_name*" contain a simple *string* with the desired file name; **building block** inputs ending in "*_config*" contain the **properties parameters** in a *string* reproducing a **JSON format**. Please note here that all double quotes in **JSON format** must be escaped. The **properties parameters** were taken from the original **Protein Gromacs MD Setup** workflow [Jupyter Notebook tutorial](https://github.com/bioexcel/biobb_wf_md_setup). Please refer to it to find information about the values used. _____no_output_____
<code>
# Protein MD-Setup CWL workflow with BioExcel building blocks - Input YAML configuration file
# https://github.com/bioexcel/biobb_wf_md_setup
step1_pdb_name: 'tutorial.pdb'
step1_pdb_config: '{"pdb_code" : "1aki"}'
step4_editconf_config: '{"box_type": "cubic","distance_to_molecule": 1.0}'
step6_gppion_config: '{"mdp": {"type":"minimization"}}'
step7_genion_config: '{"neutral": "True"}'
step8_gppmin_config: '{"mdp": {"type":"minimization", "nsteps":"5000", "emtol":"500"}}'
step10_energy_min_config: '{"terms": ["Potential"]}'
step10_energy_min_name: 'energy_min.xvg'
step11_gppnvt_config: '{"mdp": {"type":"nvt", "nsteps":"5000", "dt":0.002, "define":"-DPOSRES"}}'
step13_energy_nvt_config: '{"terms": ["Temperature"]}'
step13_energy_nvt_name: 'energy_nvt.xvg'
step14_gppnpt_config: '{"mdp": {"type":"npt", "nsteps":"5000"}}'
step16_energy_npt_config: '{"terms": ["Pressure","Density"]}'
step16_energy_npt_name: 'energy_npt.xvg'
step17_gppmd_config: '{"mdp": {"type":"free", "nsteps":"50000"}}'
step19_rmsfirst_config: '{"selection": "Backbone"}'
step19_rmsfirst_name: 'rmsd_first.xvg'
step20_rmsexp_config: '{"selection": "Backbone"}'
step20_rmsexp_name: 'rmsd_exp.xvg'
step21_rgyr_config: '{"selection": "Backbone"}'
step22_image_config: '{"center_selection":"Protein","output_selection":"Protein","pbc":"mol"}'
step23_dry_config: '{"selection": "Protein"}'_____no_output_____
</code>
<a id="mdcwlrun"></a>
### Running the CWL workflow:
The final step of the process is **running the workflow described in CWL**. For that, the complete **workflow description** should be written to a file (e.g. BioExcel-CWL-MDSetup.cwl), the **YAML** input should be written to a separate file (e.g. BioExcel-CWL-MDSetup-job.yml), and finally both files should be used with the **CWL tool description reference implementation executor** (cwltool).
As in the previous example, it is important to note that in order to properly run the **CWL workflow**, the **CWL descriptions** for all the **building blocks** used in the **workflow** should be accessible from the file system. In this example, all the **CWL descriptions** needed were downloaded from the [BioExcel building blocks adapters github repository](https://github.com/bioexcel/biobb_adapters/tree/master/biobb_adapters/cwl) to a folder named **biobb_adapters**.
It is worth noting that, as this workflow uses different **BioExcel building block modules** (biobb_io, biobb_model, biobb_md and biobb_analysis), the **Docker container** for each of these modules will be downloaded the first time it is launched. This process **could take some time** (and **disk space**). Once all the **Docker containers** are correctly downloaded and integrated in the system, the **workflow** should take around 1h to complete (depending on the machine used).
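A straightforward way to make these **CWL descriptions** available locally (assuming **git** is installed; the default destination folder shown here may need to be adjusted so that it matches the relative paths used in the *run* fields of the **workflow description**) is to clone the adapters repository:

    $ git clone https://github.com/bioexcel/biobb_adapters.git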
The **command line** is shown in the cell below:_____no_output_____
<code>
# Run CWL workflow with CWL tool description reference implementation (cwltool).
cwltool BioExcel-CWL-MDSetup.cwl BioExcel-CWL-MDSetup-job.yml_____no_output_____
</code>
***
<a id="questions"></a>
## Questions & Comments
Questions, issues, suggestions and comments are really welcome!
* GitHub issues:
* [https://github.com/bioexcel/biobb](https://github.com/bioexcel/biobb)
* BioExcel forum:
* [https://ask.bioexcel.eu/c/BioExcel-Building-Blocks-library](https://ask.bioexcel.eu/c/BioExcel-Building-Blocks-library)
_____no_output_____
| {
"repository": "longr/biobb_wf_cwl_tutorial",
"path": "biobb_wf_cwl_tutorial/notebooks/biobb_CWL_tutorial.ipynb",
"matched_keywords": [
"molecular dynamics"
],
"stars": null,
"size": 69522,
"hexsha": "d0d82dc3f5fa7eca8b0d1a750aa066be9b91a019",
"max_line_length": 1423,
"avg_line_length": 48.5488826816,
"alphanum_fraction": 0.6298150226
} |
# Notebook from leylabmpi/16S-arc_vertebrate_paper
Path: 01_LLA/02_LLA_run_merge.ipynb
<h1>Table of Contents<span class="tocSkip"></span></h1>
<div class="toc"><ul class="toc-item"><li><span><a href="#Goal" data-toc-modified-id="Goal-1"><span class="toc-item-num">1 </span>Goal</a></span></li><li><span><a href="#Var" data-toc-modified-id="Var-2"><span class="toc-item-num">2 </span>Var</a></span></li><li><span><a href="#Init" data-toc-modified-id="Init-3"><span class="toc-item-num">3 </span>Init</a></span></li><li><span><a href="#Merging" data-toc-modified-id="Merging-4"><span class="toc-item-num">4 </span>Merging</a></span><ul class="toc-item"><li><span><a href="#SV-artifact" data-toc-modified-id="SV-artifact-4.1"><span class="toc-item-num">4.1 </span>SV artifact</a></span></li><li><span><a href="#rep-seqs" data-toc-modified-id="rep-seqs-4.2"><span class="toc-item-num">4.2 </span>rep-seqs</a></span></li><li><span><a href="#Taxonomy" data-toc-modified-id="Taxonomy-4.3"><span class="toc-item-num">4.3 </span>Taxonomy</a></span></li></ul></li><li><span><a href="#Alignment" data-toc-modified-id="Alignment-5"><span class="toc-item-num">5 </span>Alignment</a></span><ul class="toc-item"><li><span><a href="#Creating-alignment" data-toc-modified-id="Creating-alignment-5.1"><span class="toc-item-num">5.1 </span>Creating alignment</a></span></li><li><span><a href="#Masking-alignment" data-toc-modified-id="Masking-alignment-5.2"><span class="toc-item-num">5.2 </span>Masking alignment</a></span></li></ul></li><li><span><a href="#Phylogeny" data-toc-modified-id="Phylogeny-6"><span class="toc-item-num">6 </span>Phylogeny</a></span><ul class="toc-item"><li><span><a href="#Unrooted-tree" data-toc-modified-id="Unrooted-tree-6.1"><span class="toc-item-num">6.1 </span>Unrooted tree</a></span></li><li><span><a href="#Rooted-tree" data-toc-modified-id="Rooted-tree-6.2"><span class="toc-item-num">6.2 </span>Rooted tree</a></span></li></ul></li><li><span><a href="#sessionInfo" data-toc-modified-id="sessionInfo-7"><span class="toc-item-num">7 </span>sessionInfo</a></span></li></ul></div>_____no_output_____# Goal
* Merge results from all per-MiSeq-run `LLA` jobs
* Merging feature tables for multiple sequencing runs:
* MiSeq-Run0116
* MiSeq-Run0122
* MiSeq-Run0126
* **NOT** MiSeq-Run187 (failed run)
    * MiSeq-Run0189
    * MiSeq-Run0190
* Then running standard processing:
* dataset summary
* taxonomy
* phylogeny_____no_output_____# Var_____no_output_____
<code>
work_dir = '/ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged/'
run_dir = '/ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/'
miseq_runs = c('Run0116', 'Run0122', 'Run0126', 'Run0189', 'Run0190')
# params
conda_env = 'qiime2-2019.10'
threads = 24_____no_output_____
</code>
# Init_____no_output_____
<code>
library(dplyr)
library(tidyr)
library(ggplot2)
library(LeyLabRMisc)
Attaching package: ‘dplyr’
The following objects are masked from ‘package:stats’:
filter, lag
The following objects are masked from ‘package:base’:
intersect, setdiff, setequal, union
df.dims()
make_dir(work_dir)Directory already exists: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged/
</code>
# Merging_____no_output_____## SV artifact_____no_output_____
<code>
# artifacts for individual runs
P = file.path(run_dir, '{run}', 'table_merged_filt.qza')
runs = miseq_runs %>% as.list %>%
lapply(function(x) glue::glue(P, run=x))
runs_____no_output_____# function to merge tables
merge_tables = function(in_tbls, out_tbl, conda_env){
cmd = 'qiime feature-table merge --i-tables {in_tbls} --o-merged-table {out_tbl} --p-overlap-method sum'
cmd = glue::glue(cmd, in_tbls=in_tbls, out_tbl=out_tbl)
cat('CMD:', cmd, '\n')
ret = bash_job(cmd, conda_env=conda_env, stderr=TRUE)
cat(ret, '\n')
return(out_tbl)
}_____no_output_____# merging
table_merged_file = file.path(work_dir, 'table_merged_filt.qza')
table_merged_file = merge_tables(paste(runs, collapse=' '), table_merged_file, conda_env)
cat('Output file:', table_merged_file, '\n')CMD: qiime feature-table merge --i-tables /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0116/table_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0122/table_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0126/table_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0189/table_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0190/table_merged_filt.qza --o-merged-table /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//table_merged_filt.qza --p-overlap-method sum
Saved FeatureTable[Frequency] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//table_merged_filt.qza
Output file: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//table_merged_filt.qza
</code>
## rep-seqs_____no_output_____
<code>
# artifacts for individual runs
P = file.path(run_dir, '{run}', 'rep-seqs_merged_filt.qza')
runs = miseq_runs %>% as.list %>%
lapply(function(x) glue::glue(P, run=x))
runs_____no_output_____# function to merge seqs
merge_seqs = function(in_seqs, out_seq, conda_env){
cmd = 'qiime feature-table merge-seqs --i-data {in_seqs} --o-merged-data {out_seq}'
    cmd = glue::glue(cmd, in_seqs=in_seqs, out_seq=out_seq)
cat('CMD:', cmd, '\n')
ret = bash_job(cmd, conda_env=conda_env, stderr=TRUE)
cat(ret, '\n')
return(out_seq)
}_____no_output_____# merging
seqs_merged_file = file.path(work_dir, 'rep-seqs_merged_filt.qza')
seqs_merged_file = merge_seqs(paste(runs, collapse=' '), seqs_merged_file, conda_env)
cat('Output file:', seqs_merged_file, '\n')CMD: qiime feature-table merge-seqs --i-data /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0116/rep-seqs_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0122/rep-seqs_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0126/rep-seqs_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0189/rep-seqs_merged_filt.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0190/rep-seqs_merged_filt.qza --o-merged-data /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//rep-seqs_merged_filt.qza
Saved FeatureData[Sequence] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//rep-seqs_merged_filt.qza
Output file: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//rep-seqs_merged_filt.qza
</code>
## Taxonomy_____no_output_____
<code>
# artifacts for individual runs
P = file.path(run_dir, '{run}', 'taxonomy.qza')
runs = miseq_runs %>% as.list %>%
lapply(function(x) glue::glue(P, run=x))
runs_____no_output_____# function to merge tax
merge_tax = function(in_taxs, out_tax, conda_env){
    cmd = 'qiime feature-table merge-taxa --i-data {in_taxs} --o-merged-data {out_tax}'
    cmd = glue::glue(cmd, in_taxs=in_taxs, out_tax=out_tax)
cat('CMD:', cmd, '\n')
ret = bash_job(cmd, conda_env=conda_env, stderr=TRUE)
cat(ret, '\n')
return(out_tax)
}_____no_output_____# merging
tax_merged_file = file.path(work_dir, 'taxonomy.qza')
tax_merged_file = merge_tax(paste(runs, collapse=' '), tax_merged_file, conda_env)
cat('Output file:', tax_merged_file, '\n')CMD: qiime feature-table merge-taxa --i-data /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0116/taxonomy.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0122/taxonomy.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0126/taxonomy.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0189/taxonomy.qza /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA//Run0190/taxonomy.qza --o-merged-data /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//taxonomy.qza
Saved FeatureData[Taxonomy] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//taxonomy.qza
Output file: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//taxonomy.qza
</code>
# Alignment_____no_output_____## Creating alignment_____no_output_____
<code>
aln_file = file.path(work_dir, 'aligned-rep-seqs_filt.qza')
cmd = 'qiime alignment mafft --p-n-threads {threads} --i-sequences {in_seq} --o-alignment {out_aln}'
cmd = glue::glue(cmd, threads=threads, in_seq=seqs_merged_file, out_aln=aln_file)
bash_job(cmd, conda_env=conda_env, stderr=TRUE)
Saved FeatureData[AlignedSequence] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//aligned-rep-seqs_filt.qza
</code>
## Masking alignment_____no_output_____
<code>
aln_mask_file = file.path(work_dir, 'aligned-rep-seqs_filt_masked.qza')
cmd = 'qiime alignment mask --i-alignment {in_aln} --o-masked-alignment {out_aln}'
cmd = glue::glue(cmd, in_aln=aln_file, out_aln=aln_mask_file)
bash_job(cmd, conda_env=conda_env, stderr=TRUE)
Saved FeatureData[AlignedSequence] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//aligned-rep-seqs_filt_masked.qza
</code>
# Phylogeny_____no_output_____## Unrooted tree_____no_output_____
<code>
phy_unroot_file = file.path(work_dir, 'aligned-rep-seqs_filt_masked_unroot-tree.qza')
cmd = 'qiime phylogeny fasttree --p-n-threads {threads} --i-alignment {in_aln} --o-tree {out_phy}'
cmd = glue::glue(cmd, threads=threads, in_aln=aln_mask_file, out_phy=phy_unroot_file)
bash_job(cmd, conda_env=conda_env, stderr=TRUE)
Saved Phylogeny[Unrooted] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//aligned-rep-seqs_filt_masked_unroot-tree.qza
</code>
## Rooted tree_____no_output_____
<code>
phy_root_file = file.path(work_dir, 'aligned-rep-seqs_filt_masked_midroot-tree.qza')
cmd = 'qiime phylogeny midpoint-root --i-tree {in_phy} --o-rooted-tree {out_phy}'
cmd = glue::glue(cmd, in_phy=phy_unroot_file, out_phy=phy_root_file)
bash_job(cmd, conda_env=conda_env, stderr=TRUE)
Saved Phylogeny[Rooted] to: /ebio/abt3_projects/Georg_animal_feces/data/16S_arch/MiSeq-Runs-116-122-126-189-190/LLA/merged//aligned-rep-seqs_filt_masked_midroot-tree.qza
</code>
# sessionInfo_____no_output_____
<code>
sessionInfo()_____no_output_____
</code>
| {
"repository": "leylabmpi/16S-arc_vertebrate_paper",
"path": "01_LLA/02_LLA_run_merge.ipynb",
"matched_keywords": [
"QIIME2"
],
"stars": null,
"size": 31086,
"hexsha": "d0da06f6de7b21692556f59827c5bd6f73bdabea",
"max_line_length": 2208,
"avg_line_length": 39.5496183206,
"alphanum_fraction": 0.6131377469
} |
# Notebook from amaliestokholm/arxiv_on_deck
Path: Notebook.ipynb
<code>
# Run the scrapper
%run mpia.py -d today*** Matched author: Wel Arjen van der Wel
*** Matched author: Wu Po-Feng Wu
*** Matched author: Barisic Ivana Barisic
*** Matched author: Chauke Priscilla Chauke
*** Matched author: Houdt Josha van Houdt
*** Matched author: Pillepich Annalisa Pillepich
*** Matched author: Joshi Gandhali Joshi
*** Matched author: Gould Andrew Gould
*** Matched author: Martin Nicolas Martin
*** Matched author: Wang Jason J. Wang
*** Matched author: Zhu Zhaohuan Zhu
*** Matched author: Walter Alex B. Walter
*** Matched author: Soler J. D. Soler
*** Matched author: Beuther H. Beuther
*** Matched author: Rugel M. Rugel
*** Matched author: Wang Y. Wang
*** Matched author: Henning Th. Henning
*** Matched author: Kainulainen J. Kainulainen
*** Matched author: Mottram J. C. Mottram
*** Matched author: Lee Eve J. Lee
*** Matched author: Feldt M. Feldt
*** Matched author: Cantalloube F. Cantalloube
*** Matched author: Keppler M. Keppler
*** Matched author: Maire A.-L. Maire
*** Matched author: Mueller A. Mueller
*** Matched author: Samland M. Samland
*** Matched author: Henning T. Henning
*** Matched author: Henning T. Henning
*** Matched author: Wu Dong-Hong Wu
*** Matched author: Zhang Rachel C. Zhang
*** Matched author: Wang Shiang-Yu Wang
*** Matched author: Wu Gang Wu
*** Matched author: Bouwman Jordy Bouwman
*** Matched author: Bouwman Jordy Bouwman
*** Matched author: Avenhaus Henning Avenhaus
*** Matched author: Bertrang Gesa H. -M. Bertrang
*** Matched author: Schreiber Matthias R. Schreiber
*** Matched author: Jordán Andrés Jordán
*** Matched author: Espinoza Néstor Espinoza
*** Matched author: Henning Thomas Henning
*** Matched author: Rabus Markus Rabus
*** Matched author: Sarkis Paula Sarkis
*** Matched author: Ludwig H.-G. Ludwig
*** Matched author: Zhang Ming-Jian Zhang
*** Matched author: Martin R Martin
*** Matched author: Bailer-Jones C.A.L. Bailer-Jones
*** Matched author: Wang Ji Wang
[arXiv:1809.08236]: The Large Early Galaxy Astrophysics Census (LEGA-C) Data Release II: dynamical and stellar population properties of z ~< 1 galaxies in the COSMOS field
Caroline M. S. Straatman, \hl{Arjen van der Wel}, Rachel Bezanson, Camilla Pacifici, Anna Gallazzi, \hl{Po-Feng Wu}, Kai Noeske, \hl{Ivana Barisic}, Eric F. Bell, Gabriel B. Brammer, Joao Calhau, \hl{Priscilla Chauke}, Marijn Franx, \hl{Josha van Houdt}, Ivo Labbe, Michael V. Maseda, Juan C. Munoz-Mateos, Adam Muzzin, Jesse van de Sande, David Sobral, Justin S. Spilker
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\how}[1]{\textcolor{purple}{#1}}
\providecommand{\rA}{\mathrm{\AA}}
\providecommand{\msun}{\mathrm{M}_{\sun}}
\providecommand{\logm}{\mathrm{logM/\msun}}
\providecommand{\ha}{H$\alpha$}
\providecommand{\hb}{H$\beta$}
\providecommand{\hg}{H$\gamma$}
\providecommand{\hd}{H$\delta$}
\providecommand{\dfn}{D$4000_n$}
\providecommand{\hda}{\hd$_\mathrm{A}$}
\providecommand{\mgt}{$\mathrm{Mg_{2}}$}
\providecommand{\nuse}{1442}
\providecommand{\hblim}{$44.9\times10^{-19}\ \mathrm{ergs\ cm^{-2}\ s^{-1}}$}
\providecommand{\sfrlima}{$0.23\ \msun\ \mathrm{yr^{-1}}$}
\providecommand{\sfrlimb}{$0.47\ \msun\ \mathrm{yr^{-1}}$}
\providecommand{\sfrlimc}{$0.72\ \msun\ \mathrm{yr^{-1}}$}
\providecommand{\medianz}{$z_{\mathrm{spec}}=0.697$}
\providecommand{\sfrlimfinal}{$2.2\ \msun\ \mathrm{yr^{-1}}$}
\providecommand{\totn}{1988}
\providecommand{\prims}{1550}
\providecommand{\fills}{438}
**** From Heidelberg: True
Caroline M. S. Straatman, et al.; incl. \hl{Arjen van der Wel}, \hl{Po-Feng Wu}, \hl{Ivana Barisic}, \hl{Priscilla Chauke}, \hl{Josha van Houdt}
PDF postage: 1809.08236.pdf
[arXiv:1809.08239]: The optical morphologies of galaxies in the IllustrisTNG simulation: a comparison to Pan-STARRS observations
Vicente Rodriguez-Gomez, Gregory F. Snyder, Jennifer M. Lotz, Dylan Nelson, \hl{Annalisa Pillepich}, Volker Springel, Shy Genel, Rainer Weinberger, Sandro Tacchella, Ruediger Pakmor, Paul Torrey, Federico Marinacci, Mark Vogelsberger, Lars Hernquist, David A. Thilker
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\Msun}{{\rm M}_{\odot}}
\providecommand{\facc}{f_{\rm acc}}
\providecommand{\fex}{f_{\rm ex}}
\providecommand{\krot}{\kappa_{\rm rot}}
**** From Heidelberg: True
Vicente Rodriguez-Gomez, et al.; incl. \hl{Annalisa Pillepich}
PDF postage: 1809.08239.pdf
[arXiv:1809.08241]: Wide-Field Optical Spectroscopy of Abell 133: A Search for Filaments Reported in X-ray Observations
Thomas Connor, Daniel D. Kelson, John Mulchaey, Alexey Vikhlinin, Shannon G. Patel, Michael L. Balogh, \hl{Gandhali Joshi}, Ralph Kraft, Daisuke Nagai, Svetlana Starikova
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\vdag}{(v)^\dagger}\graphicspath{{./}{figures/}}
**** From Heidelberg: True
Thomas Connor, et al.; incl. \hl{Gandhali Joshi}
PDF postage: 1809.08241.pdf
[arXiv:1809.08243]: First Resolution of Microlensed Images
Subo Dong, (KIAA-PKU), , A. Mérand, F. Delplancke-Ströbele, (ESO), , \hl{Andrew Gould}, (MPIA, KASI, OSU), , Ping Chen, R. Post, C.S. Kochanek, K. Z. Stanek, G. W. Christie, Robert Mutel, T. Natusch, T. W.-S. Holoien, J. L. Prieto, B. J. Shappee, Todd A. Thompson
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\msun}{{\rm \ M_\odot}}
\providecommand{\bdv}[1]{\mbox{\boldmath$#1$}}
\providecommand{\bd}[1]{{\rm #1}}\def\au{{\rm AU}}
\def\sinc{{\rm sinc}}
\def\kms{{\rm km}\,{\rm s}^{-1}}
\def\masyr{{\rm mas}\,{\rm yr}^{-1}}
\def\kpc{{\rm kpc}}
\def\mas{{\rm mas}}
\def\sat{{\rm sat}}
\def\muas{\mu{\rm as}}
\def\var{{\rm var}}
\def\pc{{\rm pc}}
\def\orb{{\rm orb}}
\def\obs{{\rm obs}}
\def\max{{\rm max}}
\def\min{{\rm min}}
\def\rel{{\rm rel}}
\def\ast{{\rm ast}}
\def\eff{{\rm eff}}
\def\rot{{\rm rot}}
\def\lsr{{\rm lsr}}
\def\hel{{\rm hel}}
\def\geo{{\rm geo}}
\def\e{{\rm E}}
\def\bpi{{\bdv\pi}}
\def\bmu{{\bdv\mu}}
\def\balpha{{\bdv\alpha}}
\def\bgamma{{\bdv\gamma}}
\def\bDelta{{\bdv\Delta}}
\def\btheta{{\bdv\theta}}
\def\bphi{{\bdv\phi}}
\def\bp{{\bf p}}
\def\bv{{\bf v}}
\def\bu{{\bf u}}
\def\naive{{\rm naive}}
\def\revise{\bf}
**** From Heidelberg: True
Subo Dong, et al.; incl. \hl{Andrew Gould}
PDF postage: 1809.08243.pdf
[arXiv:1809.08245]: A-type stars in the Canada-France Imaging Survey I. The stellar halo of the Milky Way traced to large radius by blue horizontal branch stars
Guillaume F. Thomas, Alan W. McConnachie, Rodrigo A. Ibata, Patrick Côté, \hl{Nicolas Martin}, Else Starkenburg, Raymond Carlberg, Scott Chapman, Sébastien Fabbro, Benoit Famaey, Nicholas Fantin, Stephen Gwyn, Vincent Hénault-Brunet, Khyati Malhan, Julio Navarro, Annie C. Robin, Douglas Scott
extracting tarball...
*** Found macros and definitions in the header:
\def\ltsima{$\; \buildrel < \over \sim \;$}
\def\simlt{\lower.5ex\hbox{\ltsima}}
\def\gtsima{$\; \buildrel > \over \sim \;$}
\def\simgt{\lower.5ex\hbox{\gtsima}}
**** From Heidelberg: True
Guillaume F. Thomas, et al.; incl. \hl{Nicolas Martin}
PDF postage: 1809.08245.pdf
[arXiv:1809.08261]: A Bayesian Framework for Exoplanet Direct Detection and Non-Detection
Jean-Baptiste Ruffio, Dimitri Mawet, Ian Czekala, Bruce Macintosh, Robert J. De Rosa, Garreth Ruane, Michael Bottom, Laurent Pueyo, \hl{Jason J. Wang}, Lea Hirsch, \hl{Zhaohuan Zhu}, Eric L. Nielsen
extracting tarball...
multiple tex files
Found main document in: (0, './tmp/sample62.tex')
Found main document in: ./tmp/sample62.tex
0 ./tmp/sample62.tex
Found main document in: ./tmp/sample62.tex
*** Found document inclusions
input command: content
*** Found macros and definitions in the header:
\providecommand{\vdag}{(v)^\dagger}
\providecommand{\Secref}[1]{\hyperref[#1]{Section~\ref*{#1}}}
\providecommand{\Appref}[1]{\hyperref[#1]{Appendix~\ref*{#1}}}
\providecommand{\epseri}{$\epsilon$ Eridani}\graphicspath{{./}{figures/}}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08301]: SCExAO, an instrument with a dual purpose: perform cutting-edge science and develop new technologies
Julien Lozi, Olivier Guyon, Nemanja Jovanovic, Sean Goebel, Prashant Pathak, Nour Skaf, Ananya Sahoo, Barnaby Norris, Frantz Martinache, Mamadou N'Diaye, Ben Mazin, \hl{Alex B. Walter}, Peter Tuthill, Tomoyuki Kudo, Hajime Kawahara, Takayuki Kotani, Michael Ireland, Nick Cvetojevic, Elsa Huby, Sylvestre Lacour, Sebastien Vievard, Tyler D. Groff, Jeffrey K. Chilcote, Jeremy Kasdin, Justin Knight, Frans Snik, David Doelman, Yosuke Minowa, Christophe Clergeon, Naruhisa Takato, Motohide Tamura, Thayne Currie, Hideki Takami, Masa Hayashi
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\baselinestretch}{1.0}
\providecommand{\mum}{\mbox{{\usefont{U}{eur}{m}{n}{\char22}}m}\xspace}
\providecommand{\mus}{\mbox{{\usefont{U}{eur}{m}{n}{\char22}}s}\xspace}
\providecommand{\sce}{\mbox{SCE\lowercase{x}AO}\xspace}
\providecommand{\e}[1]{10^{#1}}
\providecommand{\E}[1]{\times10^{#1}}
\providecommand{\lod}{\mbox{$\lambda$/D}\xspace}
\providecommand{\FIG}[3]{\includegraphics[width=#1\linewidth,draft=#2]{#3}}
\providecommand{\FIGH}[3]{\includegraphics[height=#1cm,draft=#2]{#3}}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08338]: Histogram of oriented gradients: a technique for the study of molecular cloud formation
\hl{J. D. Soler}, \hl{H. Beuther}, \hl{M. Rugel}, \hl{Y. Wang}, L. D. Anderson, P. C. Clark, S. C. O. Glover, P. F. Goldsmith, A. Goodman, P. Hennebelle, \hl{Th. Henning}, M. Heyer, \hl{J. Kainulainen}, R. S. Klessen, N. M. McClure-Griffiths, K. M. Menten, \hl{J. C. Mottram}, S. E. Ragan, P. Schilke, R. J. Smith, J. S. Urquhart, F. Bigiel, N. Roy
extracting tarball...
multiple tex files
Found main document in: (0, './tmp/HIandCO.tex')
Found main document in: ./tmp/HIandCO.tex
0 ./tmp/HIandCO.tex
Found main document in: ./tmp/HIandCO.tex
*** Found document inclusions
input command: PIP_113_Boulanger_authors_and_institutes
*** print_tb
File "/home/jovyan/app.py", line 847, in _expand_auxilary_files
with open(directory + fname + '.tex', 'r', errors="surrogateescape") as fauxilary:
[Errno 2] No such file or directory: './tmp/PIP_113_Boulanger_authors_and_institutes.tex'
*** Found macros and definitions in the header:
\providecommand{\henrik}[1]{{\bf \color{green} [#1]}}
\providecommand{\hhenrik}[1]{}
\providecommand{\juan}[1]{{\bf \color{red} #1}}
\providecommand{\commentproof}[1]{{\bf \color{green}#1}}
\providecommand{\commentproof}[1]{}
\providecommand{\planck}{\Planck} \def\Herschel{\textit{Herschetowardl}}
\providecommand{\nh}{$N_{\textsc{H}}$}
\providecommand{\nhd}{N_{\textsc{H}}}
\providecommand{\gradnh}{$\mathbf{\nabla}N_{\textsc{H}}$}
\providecommand{\lognh}{$\log_{10}(N_{\textsc{H}}/\mbox{cm}^{-2})$}
\providecommand{\microG}{$\mu$G}
\providecommand{\bcf}{$B^{\textsc{DCF}}_{\perp}$}
\providecommand{\bhilde}{$B^{\textsc{\HIL}}_{\perp}$}
\providecommand{\IRAS}{\textit{IRAS\/}}
\providecommand{\WMAP}{\textit{WMAP\/}}
\providecommand{\COBE}{\textit{COBE\/}}
\providecommand{\Spitzer}{\textit{Spitzer\/}}
\providecommand{\healpix}{{\sc HEALPix}}
\providecommand{\sextractor}{{\sc SExtractor}}
\providecommand{\hii}{\ion{H}{II}}
\providecommand{\viewangle}{\alpha}
\providecommand{\bvect}{\vec{B}}
\providecommand{\planckurl}{\burl{http://www.rssd.esa.int/index.php?project=PLANCK&page=Planck_Collaboration}}
\providecommand{\sorthelp}[1]{}
\providecommand{\bperp}{$\langle\hat{\vec{B}}_{\perp}\rangle$}
\providecommand{\wc}{{\mkern 2mu\cdot\mkern 2mu}}
\providecommand{\prs}{$V$}
\providecommand{\mrv}{$r$}
\providecommand{\kps}{km\,s$^{-1}$}
\providecommand{\vhi}{$v_{\rm HI}$}
\providecommand{\vco}{$v_{\rm 13CO}$}
\providecommand{\vlsr}{$v_{\rm LSR}$}
\providecommand{\vlos}{$v_{\rm LOS}$}\def\bfc{}
\def\bfc{\bf}
\def\bfm{\bf \color{magenta}}
\def\bfm{} \newcommand{\commentproof}[1]{{\bf \color{green}#1}}
\def\Herschel{\textit{Herschetowardl}}
**** From Heidelberg: True
\hl{J. D. Soler}, et al.; incl. \hl{H. Beuther}, \hl{M. Rugel}, \hl{Y. Wang}, \hl{Th. Henning}, \hl{J. Kainulainen}, \hl{J. C. Mottram}
PDF postage: 1809.08338.pdf
[arXiv:1809.08348]: On The Nature of Variations in the Measured Star Formation Efficiency of Molecular Clouds
Michael Y. Grudić, Philip F. Hopkins, \hl{Eve J. Lee}, Norman Murray, Claude-André Faucher-Giguère, L. Clifton Johnson
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\msun}{M_{\sun}}
\providecommand{\oft}{\left(t\right)}\defcitealias{grudic:2016.sfe}{G18}
\defcitealias{lee:2016.gmc.eff}{L+16}
\defcitealias{vuti:2016.gmcs}{V+16}
\defcitealias{heyer:2016.clumps}{H+16}
\defcitealias{wu:2010.clumps}{W+10}
\defcitealias{evans:2014.sfe}{E+14}
\defcitealias{lada:2010.gmcs}{L+10}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08354]: beta Pictoris b post conjunction detection with VLT/SPHERE
A.-M. Lagrange, A. Boccaletti, M. Langlois, G. Chauvin, R. Gratton, H. Beust, S. Desidera, J. Milli, M. Bonnefoy, \hl{M. Feldt}, M. Meyer, A. Vigan, B. Biller, M. Bonavita, J.-L. Baudino, \hl{F. Cantalloube}, M. Cudel, S. Daemgen, P. Delorme, V. DOrazi, J. Girard, C. Fontanive, J. Hagelberg, M. Janson, \hl{M. Keppler}, T. Koypitova, R. Galicher, J. Lannier, H. Le Coroller, R. Ligi, \hl{A.-L. Maire}, D. Mesa, S. Messina, \hl{A. Mueller}, S. Peretti, C. Perrot, D. Rouan, G. Salter, \hl{M. Samland}, T. Schmidt, E. Sissa, A. Zurlo, J.-L. Beuzit, D. Mouillet, C. Dominik, \hl{T. Henning}, E. Lagadec, F. Menard, H.-M. Schmid, S. Udry, , the , SPHERE consortium
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\eg}{{\it e.g.}, }
\providecommand{\ie}{{\it i.e.}, }
\providecommand{\ms}{m\,s$^{\rm -1}$}
\providecommand{\kms}{km\,s$^{\rm -1}$}
\providecommand{\Mjup}{M$_{\rm Jup}$}
\providecommand{\mjup}{M$_{\rm Jup}$}
\providecommand{\mearth}{M$_{\rm Earth}$}
\providecommand{\Msun}{M$_{\sun}$}
\providecommand{\vsini}{$v\sin{i}$}
\providecommand{\elodie}{E{\small LODIE}}
\providecommand{\sophie}{S{\small OPHIE}}
\providecommand{\harps}{H{\small ARPS}}
\providecommand{\thetacyg}{$\theta$\,Cygni}
\providecommand{\bp}{$\beta$\,Pictoris\,}
\providecommand{\bpic}{$\beta$\,Pictoris\,}
\providecommand{\acena}{$\alpha$\,CenA\,}
\providecommand{\acenb}{$\alpha$\,CenB\,}
\providecommand{\plmo}{$^{+}_{-} $}
\providecommand{\muup}{$\mu$m}
**** From Heidelberg: True
A.-M. Lagrange, et al.; incl. \hl{M. Feldt}, \hl{F. Cantalloube}, \hl{M. Keppler}, \hl{A.-L. Maire}, \hl{A. Mueller}, \hl{M. Samland}, \hl{T. Henning}
PDF postage: 1809.08354.pdf
[arXiv:1809.08385]: Properties and occurrence rates of $Kepler$ exoplanet candidates as a function of host star metallicity from the DR25 catalog
M. Narang, (TIFR), , P. Manoj, (TIFR), , E. Furlan, (IPAC), , C. Mordasini, (Physikalisches Institut, Univ. of Bern), , \hl{T. Henning}, (MPIA), , B. Mathew, (Christ Univ.), , R. K. Banyal, (IIA), , T. Sivarani, (IIA)
extracting tarball...
*** Found macros and definitions in the header:
\def\teff {{$T_\mathrm{eff}$ }}
\def\Re {{$\,R_\oplus$ }}
\def\Me {{$\,M_\oplus$ }}
\def\Rj {{$\,R_J$ }}
\def\Mj {{$\,M_\mathrm{J}$ }}
\def\lg {{log$\,g$ }}
\def\pl {{planetary }}
\def\plr {{planetary radius }}
\def\plm {{planetary mass }}
\def \hsm {{host star metallicity }}
\def \hs {{host star }}
\def \sc {{SWEET-Cat }}
**** From Heidelberg: True
M. Narang, et al.; incl. \hl{T. Henning}
PDF postage: 1809.08385.pdf
[arXiv:1809.08499]: Dynamical instability and its implications for planetary system architecture
\hl{Dong-Hong Wu}, \hl{Rachel C. Zhang}, Ji-Lin Zhou, Jason H. Steffen
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\kepler}{\textit{Kepler}}\graphicspath{{./}{figures/}}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08501]: Col-OSSOS: The Colours of the Outer Solar System Origins Survey
Megan E. Schwamb, Wesley C. Fraser, Michele T. Bannister, Michael Marsset, Rosemary E. Pike, J. J. Kavelaars, Susan D. Benecchi, Matthew J. Lehner, \hl{Shiang-Yu Wang}, Audrey Thirouin, Audrey Delsanti, Nuno Peixinho, Kathryn Volk, Mike Alexandersen, Ying-Tung Chen, Brett Gladman, Stephen D. J. Gwyn, Jean-Marc Petit
extracting tarball...
*** Found macros and definitions in the header:
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08739]: High-mass outflows identified from COHRS CO\,(3 - 2) Survey
Qiang Li, Jianjun Zhou, Jarken Esimbek, Yuxin He, W. A. Baan, Dalei Li, \hl{Gang Wu}, Xindi Tang, Weiguang Ji
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\vdag}{(v)^\dagger}
\providecommand{\RNum}[1]{\uppercase\expandafter{\romannumeral #1\relax}}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08798]: Infrared Spectra of Hexa-peri-hexabenzocoronene Cations:HBC+ and HBC2+
Junfeng Zhen, Pablo Castellanos, \hl{Jordy Bouwman}, Harold Linnartz, Alexander G. G. M. Tielens
extracting tarball...
*** Found macros and definitions in the header:
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08800]: Laboratory gas-phase infrared spectra of two astronomically relevant PAH cations: diindenoperylene, C$_{32}$H$_{16}$$^+$ and dicoronylene, C$_{48}$H$_{20}$$^+$
Junfeng Zhen, Alessandra Candian, Pablo Castellanos, \hl{Jordy Bouwman}, Harold Linnartz, Alexander G. G. M. Tielens
*** print_tb
File "/home/jovyan/mpia.py", line 155, in main
s = paper.retrieve_document_source('./tmp')
File "/home/jovyan/app.py", line 1092, in retrieve_document_source
tar = tarfile.open(mode='r|gz', fileobj=urlopen(where))
File "/srv/conda/lib/python3.6/tarfile.py", line 1597, in open
stream = _Stream(name, filemode, comptype, fileobj, bufsize)
File "/srv/conda/lib/python3.6/tarfile.py", line 379, in __init__
self._init_read_gz()
File "/srv/conda/lib/python3.6/tarfile.py", line 484, in _init_read_gz
raise ReadError("not a gzip file")
not a gzip file
[arXiv:1809.08844]: The Ophiuchus DIsc Survey Employing ALMA (ODISEA) - I : project description and continuum images at 28 au resolution
Lucas A. Cieza, Dary Ruíz-Rodríguez, Antonio Hales, Simon Casassus, Sebastian Pérez, Camilo Gonzalez-Ruilova, Hector Cánovas, Jonathan P. Williams, Alice Zurlo, Megan Ansdell, \hl{Henning Avenhaus}, Amelia Bayo, \hl{Gesa H. -M. Bertrang}, Valentin Christiaens, William Dent, Gabriel Ferrero, Roberto Gamen, Johan Olofsson, Santiago Orcajo, Karla Peña Ramírez, David Principe, \hl{Matthias R. Schreiber}, Gerrit van der Plas
extracting tarball...
*** Found macros and definitions in the header:
**** From Heidelberg: True
Lucas A. Cieza, et al.; incl. \hl{Henning Avenhaus}, \hl{Gesa H. -M. Bertrang}, \hl{Matthias R. Schreiber}
PDF postage: 1809.08844.pdf
[arXiv:1809.08879]: EPIC 249451861b: an Eccentric Warm Saturn transiting a G-dwarf
\hl{Andrés Jordán}, Rafael Brahm, \hl{Néstor Espinoza}, Cristián Cortés, Matías Díaz, Holger Drass, \hl{Thomas Henning}, James S. Jenkins, Matías I. Jones, \hl{Markus Rabus}, Felipe Rojas, \hl{Paula Sarkis}, Maja Vučković, Abner Zapata, Maritza G. Soto, Gáspár Á. Bakos, Daniel Bayliss, Waqas Bhatti, Zoltan Csubry, Régis Lachaume, Víctor Moraga, Blake Pantoja, David Osip, Avi Shporer, Vincent Suc, Sergio Vásquez
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\vdag}{(v)^\dagger}
\providecommand{\feh}{\ensuremath{{\rm [Fe/H]}}}
\providecommand{\teff}{\ensuremath{T_{\rm eff}}}
\providecommand{\teq}{\ensuremath{T_{\rm eq}}}
\providecommand{\logg}{\ensuremath{\log{g}}}
\providecommand{\zaspe}{\texttt{ZASPE}}
\providecommand{\ceres}{\texttt{CERES}}
\providecommand{\vsini}{\ensuremath{v \sin{i}}}
\providecommand{\kms}{\ensuremath{{\rm km\,s^{-1}}}}
\providecommand{\mjup}{\ensuremath{{\rm M_{J}}}}
\providecommand{\mearth}{\ensuremath{{\rm M}_{\oplus}}}
\providecommand{\mpl}{\ensuremath{{\rm M_P}}}
\providecommand{\rjup}{\ensuremath{{\rm R_J}}}
\providecommand{\rpl}{\ensuremath{{\rm R_P}}}
\providecommand{\rstar}{\ensuremath{{\rm R}_{\star}}}
\providecommand{\mstar}{\ensuremath{{\rm M}_{\star}}}
\providecommand{\lstar}{\ensuremath{{\rm L}_{\star}}}
\providecommand{\rsun}{\ensuremath{{\rm R}_{\odot}}}
\providecommand{\msun}{\ensuremath{{\rm M}_{\odot}}}
\providecommand{\lsun}{\ensuremath{{\rm L}_{\odot}}}
\providecommand{\mpkep}{\ensuremath{0.315 \pm 0.027 }}
\providecommand{\rpkep}{\ensuremath{0.847 \pm 0.013 }}
\providecommand{\mskep}{\ensuremath{1.049_{-0.029}^{+0.021} }}
\providecommand{\rskep}{\ensuremath{1.085 \pm 0.010 }}
\providecommand{\per}{\ensuremath{14.893291 \pm 0.000025 }}
\providecommand{\ecc}{\ensuremath{0.478 \pm 0.026 }}
\providecommand{\sma}{\ensuremath{0.1204_{-0.0011}^{0.0008} }}
\providecommand{\plname}{EPIC~249451861b}
\providecommand{\stname}{EPIC~249451861}
\providecommand{\rhopl}{\ensuremath{{\rm \rho_P}}}
\providecommand{\rhopkep}{\ensuremath{1.154 \pm 0.045 }}
\providecommand{\gccm}{\ensuremath{\mathrm{g}\,\mathrm{cm}^{-3}}}\graphicspath{{./}{figures/}}
**** From Heidelberg: True
\hl{Andrés Jordán}, et al.; incl. \hl{Néstor Espinoza}, \hl{Thomas Henning}, \hl{Markus Rabus}, \hl{Paula Sarkis}
PDF postage: 1809.08879.pdf
[arXiv:1809.08904]: Influence of metallicity on the near-surface effect affecting oscillation frequencies
L. Manchon, K. Belkacem, R. Samadi, T. Sonoi, J. P. C. Marques, \hl{H.-G. Ludwig}, E. Caffau
extracting tarball...
*** Found macros and definitions in the header:
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1806.02981]: Observational constraint on the dark energy scalar field
\hl{Ming-Jian Zhang}, Hong Li
extracting tarball...
*** Found macros and definitions in the header:
\def\bea{\begin{eqnarray}}
\def\eea{\end{eqnarray}}
\def\be{\begin{equation}}
\def\ee{\end{equation}}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.08969]: Experimental results from the ST7 mission on LISA Pathfinder
G Anderson, J Anderson, M Anderson, G Aveni, D Bame, P Barela, K Blackman, A Carmain, L Chen, M Cherng, S Clark, M Connally, W Connolly, D Conroy, M Cooper, C Cutler, J D'Agostino, N Demmons, E Dorantes, C Dunn, M Duran, E Ehrbar, J Evans, J Fernandez, G Franklin, M Girard, J Gorelik, V Hruby, O Hsu, D Jackson, S Javidnia, D Kern, M Knopp, R Kolasinski, C Kuo, T Le, I Li, O Liepack, A Littlefield, P Maghami, S Malik, L Markley, \hl{R Martin}, C Marrese-Reading, J Mehta, J Mennela, D Miller, D Nguyen, J O'Donnell, R Parikh, G Plett, T Ramsey, T Randolph, S Rhodes, A Romero-Wolf, T Roy, A Ruiz, H Shaw, J Slutsky, D Spence, J Stocky, J Tallon, I Thorpe, W Tolman, H Umfress, R Valencia, C Valerio, W Warner, J Wellman, P Willis, J Ziemer, J Zwahlen, M Armano, H Audley, J Baird, P Binetruy, , et al. (72 additional authors not shown)
extracting tarball...
*** Found macros and definitions in the header:
\providecommand{\subf}[2]{ {\small\begin{tabular}[t]{@{}c@{}}
\providecommand{\red}[1]{\textcolor{red}{\bf #1}}
\providecommand{\blue}[1]{\textcolor{blue}{\bf #1}}
\providecommand{\private}[1]{}
\providecommand{\braket}[2]{\left\langle#1\,|\,#2\,\right\rangle}
\providecommand{\expec}[1]{\langle#1\rangle}
\providecommand{\be}{\begin{equation}}
\providecommand{\ee}{\end{equation}}
\providecommand{\bea}{\begin{eqnarray}}
\providecommand{\eea}{\end{eqnarray}}
\providecommand{\bdm}{\begin{displaymath}}
\providecommand{\edm}{\end{displaymath}}
\providecommand{\drm}{{\rm d}}\def\lesssim{\mathrel{\hbox{\rlap{\hbox{\lower4pt\hbox{$\sim$}}}\hbox{$<$}}}}
\def\gtrsim{\mathrel{\hbox{\rlap{\hbox{\lower4pt\hbox{$\sim$}}}\hbox{$>$}}}}
\def\cos{\rm cos}
\def\sin{\rm sin}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
[arXiv:1809.09009]: Plausible home stars of the interstellar object 'Oumuamua found in Gaia DR2
\hl{C.A.L. Bailer-Jones}, (MPIA Heidelberg), , D. Farnocchia, (JPL), , K.J. Meech, (Uni. Hawai'i), , R. Brasser, (Tokyo Institute of Technology), , M. Micheli, (ESA SSA-NEO Coordination Centre), , S. Chakrabarti, (Rochester Institute of Technology), , M.W. Buie, (Southwest Research Institute), , O.R. Hainaut, (ESO)
extracting tarball...
*** Found document inclusions
input command: figures/cands41plushome4_encounters
*** Found macros and definitions in the header:
\providecommand{\deriv}{\ensuremath{\mathrm{d}}}
\providecommand{\given}{\ensuremath{\hspace{0.05em}\mid\hspace{0.05em}}}
\providecommand{\okina}{`}
\providecommand{\uone}{1I/2017~U1}
\providecommand{\oum}{{{\okina}Oumuamua}}
\providecommand{\Hawaii}{Hawai{\okina}i}
\providecommand{\rainf}{\ensuremath{\ra_\infty}}
\providecommand{\decinf}{\ensuremath{\dec_\infty}}
\providecommand{\vinf}{\ensuremath{v_\infty}}
\providecommand{\candsaa}{2\,k=2}
\providecommand{\candsbb}{3\,k=2}
\providecommand{\candscc}{3\,k=1}
\providecommand{\candsdd}{7d}
\providecommand{\candsee}{7e}
\providecommand{\candsff}{7c}
\providecommand{\gaia}{Gaia}
\providecommand{\gdr}[1]{Gaia\,DR{#1}}
\providecommand{\gmag}{\ensuremath{G}}
\providecommand{\mg}{M$_\gmag$}
\providecommand{\bprp}{BP-RP}
\providecommand{\teff}{\ensuremath{T_{\rm eff}}}
\providecommand{\tenc}{\ensuremath{t_{\rm enc}}}
\providecommand{\denc}{\ensuremath{d_{\rm enc}}}
\providecommand{\venc}{\ensuremath{v_{\rm enc}}}
\providecommand{\tenclma}{\ensuremath{t_{\rm enc}^{\rm lma}}}
\providecommand{\denclma}{\ensuremath{d_{\rm enc}^{\rm lma}}}
\providecommand{\venclma}{\ensuremath{v_{\rm enc}^{\rm lma}}}
\providecommand{\tencmed}{\ensuremath{t_{\rm enc}^{\rm med}}}
\providecommand{\dencmed}{\ensuremath{d_{\rm enc}^{\rm med}}}
\providecommand{\vencmed}{\ensuremath{v_{\rm enc}^{\rm med}}}
\providecommand{\ra}{\ensuremath{\alpha}}
\providecommand{\dec}{\ensuremath{\delta}}
\providecommand{\pmra}{\ensuremath{\mu_{\ra\ast}}}
\providecommand{\parallax}{\ensuremath{\varpi}}
\providecommand{\parzp}{\ensuremath{\varpi_{\rm zp}}}
\providecommand{\sigparallax}{\ensuremath{\sigma_{\varpi}}}
\providecommand{\pmdec}{\ensuremath{\mu_\dec}}
\providecommand{\propm}{\ensuremath{\mu}}
\providecommand{\vx}{\ensuremath{v_x}}
\providecommand{\vy}{\ensuremath{v_y}}
\providecommand{\vz}{\ensuremath{v_z}}
\providecommand{\sigmavx}{\ensuremath{\sigma(\vx)}}
\providecommand{\sigmavy}{\ensuremath{\sigma(\vy)}}
\providecommand{\sigmavz}{\ensuremath{\sigma(\vz)}}
\providecommand{\corvxvy}{\ensuremath{\rho(\vx, \vy)}}
\providecommand{\corvxvz}{\ensuremath{\rho(\vx, \vz)}}
\providecommand{\corvyvz}{\ensuremath{\rho(\vy, \vz)}}
\providecommand{\vr}{\ensuremath{v_r}}
\providecommand{\sigvr}{\ensuremath{\sigma(\vr)}}
\providecommand{\vtan}{\ensuremath{v_T}}
\providecommand{\rsol}{\ensuremath{r_\odot}}
\providecommand{\zsol}{\ensuremath{z_\odot}}
\providecommand{\glon}{\ensuremath{l}}
\providecommand{\glat}{\ensuremath{b}}
\providecommand{\rvec}{\ensuremath{\boldsymbol{r}}}
\providecommand{\vvec}{\ensuremath{\boldsymbol{v}}}
\providecommand{\kms}{\ensuremath{\textrm{km\,s}^{-1}}}
\providecommand{\maspyr}{\ensuremath{\textrm{mas\,yr}^{-1}}}
\providecommand{\msun}{\ensuremath{M_\odot}}
\providecommand{\degree}{\ensuremath{^\circ}}
\providecommand{\red}{\textcolor{red}}
\providecommand{\blue}{\textcolor{blue}}\def\myeol{\\}
\definecolor{VeryDarkBlue}{RGB}{0,0,80}
\definecolor{VeryDarkRed}{RGB}{90,0,00}
**** From Heidelberg: True
\hl{C.A.L. Bailer-Jones}, et al.; incl.
PDF postage: 1809.09009.pdf
[arXiv:1809.09080]: Detecting Water In the atmosphere of HR 8799 c with L-band High Dispersion Spectroscopy Aided By Adaptive Optics
\hl{Ji Wang}, Dimitri Mawet Jonathan J. Fortney, Callie Hood, Caroline V. Morley, Bjorn Benneke
extracting tarball...
multiple tex files
Found main document in: (0, './tmp/ms.tex')
Found main document in: ./tmp/ms.tex
0 ./tmp/ms.tex
Found main document in: ./tmp/ms.tex
*** Found document inclusions
input command: obs_summary
input command: Telescope_Instrument
input command: HR8799c
input command: Simulation_results
input command: Telescope_Instrument_Sun_Earth
*** print_tb
File "/home/jovyan/app.py", line 847, in _expand_auxilary_files
with open(directory + fname + '.tex', 'r', errors="surrogateescape") as fauxilary:
[Errno 2] No such file or directory: './tmp/Telescope_Instrument_Sun_Earth.tex'
input command: Sun_Earth
*** print_tb
File "/home/jovyan/app.py", line 847, in _expand_auxilary_files
with open(directory + fname + '.tex', 'r', errors="surrogateescape") as fauxilary:
[Errno 2] No such file or directory: './tmp/Sun_Earth.tex'
*** Found macros and definitions in the header:
\providecommand{\totaltargets}{138 }
\providecommand{\totalplanets}{97 }
\providecommand{\totalmulti}{27 }
\providecommand{\detectstar}{42 }
\providecommand{\detectsys}{35 }
\providecommand{\rvstar}{22 }
\providecommand{\myaostar}{60 }
\providecommand{\myaopalomar}{68 }
\providecommand{\myaokeck}{5 }
\providecommand{\myaototal}{73 }
\providecommand{\myaototalno}{65 }
\providecommand{\myaonewstar}{29 }
\providecommand{\myaonewsys}{22 }
\providecommand{\myaonewcolor}{8 }
\providecommand{\myaomiss}{11 }
\providecommand{\multicolor}{21 }
\providecommand{\multicolorod}{6 }
\providecommand{\multicolorsubarc}{5 }
\providecommand{\singleall}{38 }
\providecommand{\singleK}{29 }
\providecommand{\starK}{51 }
\providecommand{\rhk}{\mbox{$\log R^\prime_{\rm HK}$}}
\providecommand{\vdag}{(v)^\dagger}\def\au{\mbox{au}}
**** From Heidelberg: False
*** print_tb
File "/home/jovyan/mpia.py", line 160, in main
raise RuntimeError('Not an institute paper')
Not an institute paper
Issues ===============================
[arXiv:1809.08261] Jason J. Wang, Zhaohuan Zhu
Not an institute paper
[arXiv:1809.08301] Alex B. Walter
Not an institute paper
[arXiv:1809.08348] Eve J. Lee
Not an institute paper
[arXiv:1809.08499] Dong-Hong Wu, Rachel C. Zhang
Not an institute paper
[arXiv:1809.08501] Shiang-Yu Wang
Not an institute paper
[arXiv:1809.08739] Gang Wu
Not an institute paper
[arXiv:1809.08798] Jordy Bouwman
Not an institute paper
[arXiv:1809.08800] Jordy Bouwman
not a gzip file
[arXiv:1809.08904] H.-G. Ludwig
Not an institute paper
[arXiv:1806.02981] Ming-Jian Zhang
Not an institute paper
[arXiv:1809.08969] R Martin
Not an institute paper
[arXiv:1809.09080] Ji Wang
Not an institute paper
Matched Authors ======================
[arXiv:1809.08236] Wel Arjen van der Wel
[arXiv:1809.08236] Wu Po-Feng Wu
[arXiv:1809.08236] Barisic Ivana Barisic
[arXiv:1809.08236] Chauke Priscilla Chauke
[arXiv:1809.08236] Houdt Josha van Houdt
[arXiv:1809.08239] Pillepich Annalisa Pillepich
[arXiv:1809.08241] Joshi Gandhali Joshi
[arXiv:1809.08243] Gould Andrew Gould
[arXiv:1809.08245] Martin Nicolas Martin
[arXiv:1809.08261] Wang Jason J. Wang
[arXiv:1809.08261] Zhu Zhaohuan Zhu
[arXiv:1809.08301] Walter Alex B. Walter
[arXiv:1809.08338] Soler J. D. Soler
[arXiv:1809.08338] Beuther H. Beuther
[arXiv:1809.08338] Rugel M. Rugel
[arXiv:1809.08338] Wang Y. Wang
[arXiv:1809.08338] Henning Th. Henning
[arXiv:1809.08338] Kainulainen J. Kainulainen
[arXiv:1809.08338] Mottram J. C. Mottram
[arXiv:1809.08348] Lee Eve J. Lee
[arXiv:1809.08354] Feldt M. Feldt
[arXiv:1809.08354] Cantalloube F. Cantalloube
[arXiv:1809.08354] Keppler M. Keppler
[arXiv:1809.08354] Maire A.-L. Maire
[arXiv:1809.08354] Mueller A. Mueller
[arXiv:1809.08354] Samland M. Samland
[arXiv:1809.08354] Henning T. Henning
[arXiv:1809.08385] Henning T. Henning
[arXiv:1809.08499] Wu Dong-Hong Wu
[arXiv:1809.08499] Zhang Rachel C. Zhang
[arXiv:1809.08501] Wang Shiang-Yu Wang
[arXiv:1809.08739] Wu Gang Wu
[arXiv:1809.08798] Bouwman Jordy Bouwman
[arXiv:1809.08800] Bouwman Jordy Bouwman
[arXiv:1809.08844] Avenhaus Henning Avenhaus
[arXiv:1809.08844] Bertrang Gesa H. -M. Bertrang
[arXiv:1809.08844] Schreiber Matthias R. Schreiber
[arXiv:1809.08879] Jordán Andrés Jordán
[arXiv:1809.08879] Espinoza Néstor Espinoza
[arXiv:1809.08879] Henning Thomas Henning
[arXiv:1809.08879] Rabus Markus Rabus
[arXiv:1809.08879] Sarkis Paula Sarkis
[arXiv:1809.08904] Ludwig H.-G. Ludwig
[arXiv:1806.02981] Zhang Ming-Jian Zhang
[arXiv:1809.08969] Martin R Martin
[arXiv:1809.09009] Bailer-Jones C.A.L. Bailer-Jones
[arXiv:1809.09080] Wang Ji Wang
Compiled outputs =====================
[arXiv:1809.08236] Arjen van der Wel, Po-Feng Wu, Ivana Barisic, Priscilla Chauke, Josha van Houdt
[arXiv:1809.08239] Annalisa Pillepich
[arXiv:1809.08241] Gandhali Joshi
[arXiv:1809.08243] Andrew Gould
[arXiv:1809.08245] Nicolas Martin
[arXiv:1809.08338] J. D. Soler, H. Beuther, M. Rugel, Y. Wang, Th. Henning, J. Kainulainen, J. C. Mottram
[arXiv:1809.08354] M. Feldt, F. Cantalloube, M. Keppler, A.-L. Maire, A. Mueller, M. Samland, T. Henning
[arXiv:1809.08385] T. Henning
[arXiv:1809.08844] Henning Avenhaus, Gesa H. -M. Bertrang, Matthias R. Schreiber
[arXiv:1809.08879] Andrés Jordán, Néstor Espinoza, Thomas Henning, Markus Rabus, Paula Sarkis
[arXiv:1809.09009] C.A.L. Bailer-Jones
# Some current security measures prevent loading properly pdf previews.
# Converting pdfs into pngs.
!for f in `ls *pdf`; do echo ${f} && convert ${f} ${f}.png; done1809.08236.pdf
1809.08239.pdf
1809.08241.pdf
# Display preview of the compiled outputs
from IPython.display import IFrame, HTML
from glob import glob
code = ''
for fname in glob('*.png'):
code += IFrame(fname, width=600, height=1000)._repr_html_()
HTML(code)_____no_output_____
</code>
| {
"repository": "amaliestokholm/arxiv_on_deck",
"path": "Notebook.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 1,
"size": 48645,
"hexsha": "d0dbc4386c8b75bb95074898fccf5ce9aca56a56",
"max_line_length": 853,
"avg_line_length": 52.9325353645,
"alphanum_fraction": 0.5524308768
} |
# Notebook from wd15/chimad-phase-field
Path: hackathons/hackathon1/fipy/1c.ipynb
# Table of Contents
* [1c. Fixed flux spinodal decomposition on a T shaped domain](#1c.-Fixed-flux-spinodal-decomposition-on-a-T-shaped-domain)
* [Use Binder For Live Examples](#Use-Binder-For-Live-Examples)
* [Define $f_0$](#Define-$f_0$)
* [Define the Equation](#Define-the-Equation)
* [Solve the Equation](#Solve-the-Equation)
* [Run the Example Locally](#Run-the-Example-Locally)
* [Movie of Evolution](#Movie-of-Evolution)
_____no_output_____# 1c. Fixed flux spinodal decomposition on a T shaped domain_____no_output_____## Use Binder For Live Examples_____no_output_____[](http://mybinder.org/repo/wd15/fipy-hackathon1)_____no_output_____The free energy is given by,
$$ f_0\left[ c \left( \vec{r} \right) \right] =
- \frac{A}{2} \left(c - c_m\right)^2
+ \frac{B}{4} \left(c - c_m\right)^4
+ \frac{c_{\alpha}}{4} \left(c - c_{\alpha} \right)^4
+ \frac{c_{\beta}}{4} \left(c - c_{\beta} \right)^4 $$
In FiPy we write the evolution equation as
$$ \frac{\partial c}{\partial t} = \nabla \cdot \left[
D \left( c \right) \left( \frac{ \partial^2 f_0 }{ \partial c^2} \nabla c - \kappa \nabla \nabla^2 c \right)
\right] $$
Let's start by calculating $ \frac{ \partial^2 f_0 }{ \partial c^2} $ using sympy. It's easy for this case, but useful in the general case for taking care of difficult bookkeeping in phase field problems._____no_output_____
<code>
%matplotlib inline
import sympy
import fipy as fp
import numpy as np_____no_output_____A, c, c_m, B, c_alpha, c_beta = sympy.symbols("A c_var c_m B c_alpha c_beta")_____no_output_____f_0 = - A / 2 * (c - c_m)**2 + B / 4 * (c - c_m)**4 + c_alpha / 4 * (c - c_alpha)**4 + c_beta / 4 * (c - c_beta)**4_____no_output_____print f_0-A*(-c_m + c_var)**2/2 + B*(-c_m + c_var)**4/4 + c_alpha*(-c_alpha + c_var)**4/4 + c_beta*(-c_beta + c_var)**4/4
sympy.diff(f_0, c, 2)_____no_output_____
</code>
The first step in implementing any problem in FiPy is to define the mesh. In [Problem 1a]({{ site.baseurl }}/hackathons/hackathon1/problems.ipynb/#1.a-Square-Periodic) the solution domain is just a square with periodic boundary conditions, so a `PeriodicGrid2D` object is used there. Here the T-shaped domain is built by adding together two offset `Grid2D` meshes, and since the fixed (zero) flux boundary condition is FiPy's default, no other boundary conditions are required._____no_output_____
<code>
mesh = fp.Grid2D(dx=0.5, dy=0.5, nx=40, ny=200) + (fp.Grid2D(dx=0.5, dy=0.5, nx=200, ny=40) + [[-40],[100]])_____no_output_____
</code>
The next step is to define the parameters and create a solution variable._____no_output_____
<code>
c_alpha = 0.05
c_beta = 0.95
A = 2.0
kappa = 2.0
c_m = (c_alpha + c_beta) / 2.
B = A / (c_alpha - c_m)**2
D = D_alpha = D_beta = 2. / (c_beta - c_alpha)
c_0 = 0.45
q = np.sqrt((2., 3.))
epsilon = 0.01
c_var = fp.CellVariable(mesh=mesh, name=r"$c$", hasOld=True)_____no_output_____
</code>
Now we need to define the initial conditions given by,
Set $c\left(\vec{r}, t\right)$ such that
$$ c\left(\vec{r}, 0\right) = \bar{c}_0 + \epsilon \cos \left( \vec{q} \cdot \vec{r} \right) $$_____no_output_____
<code>
r = np.array((mesh.x, mesh.y))
c_var[:] = c_0 + epsilon * np.cos((q[:, None] * r).sum(0))
viewer = fp.Viewer(c_var)_____no_output_____
</code>
## Define $f_0$_____no_output_____To define the equation with FiPy first define `f_0` in terms of FiPy. Recall `f_0` from above calculated using Sympy. Here we use the string representation and set it equal to `f_0_var` using the `exec` command._____no_output_____
<code>
out = sympy.diff(f_0, c, 2)_____no_output_____exec "f_0_var = " + repr(out)_____no_output_____#f_0_var = -A + 3*B*(c_var - c_m)**2 + 3*c_alpha*(c_var - c_alpha)**2 + 3*c_beta*(c_var - c_beta)**2
f_0_var_____no_output_____
</code>
## Define the Equation_____no_output_____
<code>
eqn = fp.TransientTerm(coeff=1.) == fp.DiffusionTerm(D * f_0_var) - fp.DiffusionTerm((D, kappa))
eqn_____no_output_____
</code>
## Solve the Equation_____no_output_____To solve the equation a simple time stepping scheme is used which is decreased or increased based on whether the residual decreases or increases. A time step is recalculated if the required tolerance is not reached._____no_output_____
<code>
elapsed = 0.0
steps = 0
dt = 0.01
total_sweeps = 2
tolerance = 1e-1
total_steps = 10_____no_output_____c_var[:] = c_0 + epsilon * np.cos((q[:, None] * r).sum(0))
c_var.updateOld()
from fipy.solvers.pysparse import LinearLUSolver as Solver
solver = Solver()
while steps < total_steps:
res0 = eqn.sweep(c_var, dt=dt, solver=solver)
for sweeps in range(total_sweeps):
res = eqn.sweep(c_var, dt=dt, solver=solver)
if res < res0 * tolerance:
steps += 1
elapsed += dt
dt *= 1.1
c_var.updateOld()
else:
dt *= 0.8
c_var[:] = c_var.old
viewer.plot()
print 'elapsed_time:',elapsed_____no_output_____
</code>
## Run the Example Locally_____no_output_____The following cell will dumpy a file called `fipy_hackathon1c.py` to the local file system to be run. The images are saved out at each time step._____no_output_____
<code>
%%writefile fipy_hackathon_1c.py
import fipy as fp
import numpy as np
mesh = fp.Grid2D(dx=0.5, dy=0.5, nx=40, ny=200) + (fp.Grid2D(dx=0.5, dy=0.5, nx=200, ny=40) + [[-40],[100]])
c_alpha = 0.05
c_beta = 0.95
A = 2.0
kappa = 2.0
c_m = (c_alpha + c_beta) / 2.
B = A / (c_alpha - c_m)**2
D = D_alpha = D_beta = 2. / (c_beta - c_alpha)
c_0 = 0.45
q = np.sqrt((2., 3.))
epsilon = 0.01
c_var = fp.CellVariable(mesh=mesh, name=r"$c$", hasOld=True)
r = np.array((mesh.x, mesh.y))
c_var[:] = c_0 + epsilon * np.cos((q[:, None] * r).sum(0))
f_0_var = -A + 3*B*(c_var - c_m)**2 + 3*c_alpha*(c_var - c_alpha)**2 + 3*c_beta*(c_var - c_beta)**2
eqn = fp.TransientTerm(coeff=1.) == fp.DiffusionTerm(D * f_0_var) - fp.DiffusionTerm((D, kappa))
elapsed = 0.0
steps = 0
dt = 0.01
total_sweeps = 2
tolerance = 1e-1
total_steps = 600
c_var[:] = c_0 + epsilon * np.cos((q[:, None] * r).sum(0))
c_var.updateOld()
from fipy.solvers.pysparse import LinearLUSolver as Solver
solver = Solver()
viewer = fp.Viewer(c_var)
while steps < total_steps:
res0 = eqn.sweep(c_var, dt=dt, solver=solver)
for sweeps in range(total_sweeps):
res = eqn.sweep(c_var, dt=dt, solver=solver)
print ' '
print 'steps',steps
print 'res',res
print 'sweeps',sweeps
print 'dt',dt
if res < res0 * tolerance:
steps += 1
elapsed += dt
dt *= 1.1
if steps % 1 == 0:
viewer.plot('image{0}.png'.format(steps))
c_var.updateOld()
else:
dt *= 0.8
c_var[:] = c_var.oldOverwriting fipy_hackathon_1c.py
</code>
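The script can then be run from the command line (assuming a working FiPy installation with the PySparse solver used above), e.g.

    $ python fipy_hackathon_1c.py_____no_output_____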
## Movie of Evolution_____no_output_____The movie below shows the evolution over 600 steps.
The movie was generated with the output files of the form `image*.png` using the following commands,
$ rename 's/\d+/sprintf("%05d",$&)/e' image*
$ ffmpeg -f image2 -r 6 -i 'image%05d.png' output.mp4_____no_output_____
<code>
from IPython.display import YouTubeVideo
scale = 1.5
YouTubeVideo('aZk38E7OxcQ', width=420 * scale, height=315 * scale, rel=0)_____no_output_____
</code>
| {
"repository": "wd15/chimad-phase-field",
"path": "hackathons/hackathon1/fipy/1c.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 11,
"size": 113840,
"hexsha": "d0dd60e90f92b09b9b0c6c33dd039e044fab587c",
"max_line_length": 74464,
"avg_line_length": 205.1171171171,
"alphanum_fraction": 0.8982782853
} |
# Notebook from bastivkl/nh2020-curriculum
Path: we-visualizing-eeg-hamilton/EEG_viz_mne.ipynb
# Visualizing invasive and non-invasive EEG data
[Liberty Hamilton, PhD](https://csd.utexas.edu/research/hamilton-lab)
Assistant Professor, University of Texas at Austin
Department of Speech, Language, and Hearing Sciences
and Department of Neurology, Dell Medical School
Welcome! In this notebook we will be discussing how to look at time series electrophysiological 🧠 data that is recorded noninvasively at the scalp (scalp electroencephalography or EEG), or invasively in patients who are undergoing surgical treatment for epilepsy (sometimes called intracranial EEG or iEEG, also called stereo EEG/sEEG, or electrocorticography/ECoG).
### Python libraries you will be using in this tutorial:
* MNE-python
* matplotlib
* numpy

MNE-python is open source python software for exploring and analyzing human neurophysiological data (EEG/MEG/iEEG).
### What you will learn to do
* Load some sample EEG data
* Load some sample intracranial EEG data
* Plot the raw EEG data/iEEG data
* Plot the power spectrum of your data
* Epoch data according to specific task conditions (sentences)
* Plot all epochs and averaged evoked activity
* Plot average evoked activity in response to specific task conditions (ERPs)
* Plot by channel as well as averaging across channels
* Plot EEG activity at specific time points on the scalp (topomaps)
* Customize your plots
### Other Resources:
* [MNE-python tutorials](https://mne.tools/stable/auto_tutorials/index.html) -- These have many additional resources above and beyond this tutorial, including how to preprocess your data, remove artifacts, and more!_____no_output_____<a id="basics1"></a>
# 1. The basics: loading in your data_____no_output_____
<code>
!pip install matplotlib==3.2
import mne # This is the mne library
import numpy as np # This gives us the power of numpy, which is just generally useful for array manipulation
%matplotlib inline
from matplotlib import pyplot as plt
from matplotlib import cm
datasets = {'ecog': '/home/jovyan/data/we_eeg_viz_data/ecog/sub-S0006/S0006_ecog_hg.fif',
'eeg': '/home/jovyan/data/we_eeg_viz_data/eeg/sub-MT0002/MT0002-eeg.fif'}
event_files = {'ecog': '/home/jovyan/data/we_eeg_viz_data/ecog/sub-S0006/S0006_eve.txt',
'eeg': '/home/jovyan/data/we_eeg_viz_data/eeg/sub-MT0002/MT0002_eve.txt'}
stim_file = '/home/jovyan/data/we_eeg_viz_data/stimulus_list.csv'_____no_output_____# Get some information about the stimuli (here, the names of the sound files that were played)
ev_names = np.genfromtxt(stim_file, skip_header=1, delimiter=',', dtype=str, usecols=[1], encoding='utf-8')
ev_nums = np.genfromtxt(stim_file, skip_header=1, delimiter=',', dtype=int, usecols=[0], encoding='utf-8')
event_id = dict()
for i, ev_name in enumerate(ev_names):
event_id[ev_name] = ev_nums[i]_____no_output_____
</code>
## 1.1. Choose which dataset to look at (start with EEG)
For the purposes of this tutorial, we'll be looking at some scalp EEG and intracranial EEG datasets from my lab. Participants provided written informed consent for participation in our research. These data were collected from two distinct participants listening to sentences from the [TIMIT acoustic-phonetic corpus](https://catalog.ldc.upenn.edu/LDC93S1). This is a database of English sentences spoken by multiple talkers from throughout the United States, and has been used in speech recognition research, neuroscience research, and more!
The list of stimuli is in the `stimulus_list.csv` file. Each stimulus starts with either a "f" or a "m" to indicate a female or male talker. The rest of the alphanumeric string has to do with other characteristics of the talkers that we won't go into here. The stimulus timings have been provided for you in the event files (ending with the suffix `_eve.txt`. We'll talk about those more later.
### EEG Data
The EEG data was recorded with a 64-channel [BrainVision ActiCHamp](https://www.brainproducts.com/productdetails.php?id=74) system. These data are part of an ongoing project in our lab and are unpublished. You can find similar (larger) datasets from [Broderick et al.](https://datadryad.org/stash/dataset/doi:10.5061/dryad.070jc), or Bradley Voytek's lab has a list of [Open Electrophysiology datasets](https://github.com/openlists/ElectrophysiologyData).
### The ECoG Data
The ECoG data was recorded from 106 electrodes across multiple regions of the brain while our participant listened to TIMIT sentences. This is a smaller subset of sentences than the EEG dataset and so is a bit faster to load. The areas we recorded from are labeled according to a clinical montage. For iEEG and ECoG datasets, these names are rarely standardized, so it can be hard to know exactly what is what without additional information. Here, each channel is named according to the general location of the electrode probe to which it belongs.
| Device | General location |
|---|---|
| RAST | Right anterior superior temporal |
| RMST | Right middle superior temporal |
| RPST | Right posterior superior temporal |
| RPPST | Right posterior parietal/superior temporal |
| RAIF | Right anterior insula |
| RPI | Right posterior insula |
| ROF | Right orbitofrontal |
| RAC | Right anterior cingulate |_____no_output_____
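Because the channel names encode which probe they belong to, you can grab every channel on a single probe by matching its name prefix once the raw data has been loaded below. A minimal sketch (the `RPST` prefix is just an illustration):

<code>
# Hypothetical sketch: select all channels whose names start with 'RPST'
# (right posterior superior temporal probe) once `raw` has been loaded.
rpst_picks = mne.pick_channels_regexp(raw.ch_names, 'RPST.*')
print([raw.ch_names[i] for i in rpst_picks])
</code>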
<code>
data_type = 'eeg' # Can choose from 'eeg' or 'ecog'_____no_output_____
</code>
## 1.2. Load the data
This next command loads the data from our fif file of interest. The `preload=True` flag means that the data will be loaded (necessary for some operations). If `preload=False`, you can still perform some aspects of this tutorial, and this is a great option if you have a large dataset and would like to look at some of the header information and metadata before you start to analyze it._____no_output_____
<code>
raw = mne.io.read_raw_fif(datasets[data_type], preload=True) _____no_output_____
</code>
There is a lot of useful information in the info structure. For example, we can get the sampling frequency (`raw.info['sfreq']`), the channel names (`raw.info['ch_names']`), the channel types and locations (in `raw.info['chs']`), and whether any filtering operations have been performed already (`raw.info['highpass']` and `raw.info['lowpass']` show the cut-offs for the data)._____no_output_____
<code>
print(raw.info)_____no_output_____sampling_freq = raw.info['sfreq']
nchans = raw.info['nchan']
print('The sampling frequency of our data is %d'%(sampling_freq))
print('Here is our list of %d channels: '%nchans)
print(raw.ch_names)_____no_output_____eeg_colors = {'eeg': 'k', 'eog': 'steelblue'}
fig = raw.plot(show=False, color=eeg_colors, scalings='auto');
fig.set_figwidth(8)
fig.set_figheight(4)
_____no_output_____
</code>
<a id="plots2"></a>
# 2. Let's make some plots!
MNE-python makes creating some plots *super easy*, which is great for data quality checking, exploration, and eventually manuscript figure generation. For example, one might wish to plot the power spectral density (PSD), which shows how the signal's power is distributed across frequencies.
## 2.2. Power spectral density_____no_output_____
<code>
raw.plot_psd();_____no_output_____
</code>
## 2.3. Sensor positions (for EEG)
For EEG, MNE-python also has convenient functions for showing the location of the sensors used. Here, we have a 64-channel montage. You can also use this information to help interpret some of your plots if you're plotting a single channel or a group of channels.
For ECoG, we will not be plotting sensors in this way. If you would like read more about that process, please see [this tutorial](https://mne.tools/stable/auto_tutorials/misc/plot_ecog.html). You can also check out [Noah Benson's session](https://neurohackademy.org/course/introduction-to-the-geometry-and-structure-of-the-human-brain/) (happening in parallel with this tutorial!) for plotting 3D brains._____no_output_____
<code>
if data_type == 'eeg':
raw.plot_sensors(kind='topomap',show_names=True);
_____no_output_____
</code>
Ok, awesome! So now we know where the sensors are, how densely they tile the space, and what their names are. *Knowledge = Power!*
So what if we wanted to look at the power spectral density plot we saw above by channel? We can use `plot_psd_topo` for that! There are also customizable options for playing with the colors._____no_output_____
<code>
if data_type == 'eeg':
raw.plot_psd_topo(fig_facecolor='w', axis_facecolor='w', color='k');_____no_output_____
</code>
Finally, this one works for both EEG and ECoG. Here we are looking at the power spectral density plot again, but taking the average across trials and showing +/- 1 standard deviation from the mean across channels. _____no_output_____
<code>
raw.plot_psd(area_mode='std', average=True);_____no_output_____
</code>
Finally, we can plot these same figures using a narrower frequency range, and looking at a smaller set of channels using `picks`. For `plot_psd` and other functions, `picks` is a list of integer indices corresponding to your channels of interest. You can choose these by their number, or you can use the convenient `mne.pick_channels` function to choose them by name. For example, in EEG, we often see strong responses to auditory stimuli at the top of the head, so here we will restrict our EEG channels to a few at the top of the head at the midline. For ECoG, we are more likely to see responses to auditory stimuli in temporal lobe electrodes (potentially RPPST, RPST, RMST, RAST), so we'll try those._____no_output_____
<code>
if data_type == 'eeg':
picks = mne.pick_channels(raw.ch_names, include=['Pz','CPz','Cz','FCz','Fz','C1','C2','FC1','FC2','CP1','CP2'])
elif data_type == 'ecog':
picks = mne.pick_channels(raw.ch_names, include=['RPPST9','RPPST10','RPPST11'])
raw.plot_psd(picks = picks, fmin=1, fmax=raw.info['sfreq']/2, xscale='log');_____no_output_____
</code>
## Plotting responses to events
Ok, so this is all well and good. We can plot our raw data, the power spectrum, and the locations of the sensors. But what if we care about responses to the stimuli we described above? What if we want to look at responses to specific sentences, or the average response across all sentences, or something else? How can we determine which EEG sensors or ECoG electrodes respond to the speech stimuli?
Enter.... *Epoching!* MNE-python gives you a very convenient way of rearranging your data according to events of interest. These can actually even be found automatically from a stimulus channel, if you have one (using [`mne.find_events`](https://mne.tools/stable/generated/mne.find_events.html)), which we won't use here because we already have the timings from another procedure. You can also find other types of epochs, like those based on EMG or [eye movements (EOG)](https://mne.tools/stable/generated/mne.preprocessing.find_eog_events.html).
Here, we will load our event files (ending with `_eve.txt`). These contain information about the start sample, stop sample, and event ID for each stimulus. Each row in the file is one stimulus. The timings are in samples rather than in seconds, so if you are creating these on your own, pay attention to your sampling rate (in `raw.info['sfreq']`)._____no_output_____
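If your timings start out in seconds (or you have a stimulus channel), you can build the events array yourself. A minimal sketch, where `onsets_sec` and `stim_ids` are made-up values rather than part of this dataset:

<code>
# Hypothetical example: converting onset times in seconds to an MNE events array.
# Columns are [sample, previous value, event id]; onsets_sec/stim_ids are invented here.
onsets_sec = np.array([1.5, 4.2, 8.0])
stim_ids = np.array([3, 17, 42])
onset_samples = (onsets_sec * raw.info['sfreq']).astype(int)
my_events = np.column_stack([onset_samples, np.zeros_like(onset_samples), stim_ids])
# With a stimulus channel, you could instead use something like:
# events = mne.find_events(raw, stim_channel='STI 014')
</code>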
<code>
# Load some events. The format of these is start sample, end sample, and event ID.
events = mne.read_events(event_files[data_type])
print(events)
num_events = len(events)
unique_stimuli = np.unique(np.array(events)[:,2])
num_unique = len(unique_stimuli)
print('There are %d total events, corresponding to %d unique stimuli'%(num_events, num_unique))_____no_output_____
</code>
## Epochs
Great. So now that we have the events, we will "epoch" our data, which basically uses these timings to split up our data into trials of a given length. We will also set some parameters for data rejection to get rid of noisy trials. _____no_output_____
<code>
# Set some rejection criteria. This will be based on the peak-to-peak
# amplitude of your data.
if data_type=='eeg':
reject = {'eeg': 60e-6} # Higher than peak to peak amplitude of 60 µV will be rejected
scalings = None
units = None
elif data_type=='ecog':
reject = {'ecog': 10} # Higher than Z-score of 10 will be rejected
scalings = {'ecog': 1} # Don't rescale these as if they should be in µV
units = {'ecog': 'Z-score'}
_____no_output_____tmin = -0.2
tmax = 1.0
epochs = mne.Epochs(raw, events, tmin=tmin, tmax=tmax, baseline=(None, 0), reject=reject, verbose=True)_____no_output_____
</code>
So what's in this epochs data structure? If we look at it, we can see that we have an entry for each event ID, and we can see how many times that stimulus was played. You can also see whether baseline correction was done and for what time period, and whether any data was rejected._____no_output_____
<code>
epochs_____no_output_____
</code>
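If you want to know which trials were removed by the rejection criteria, the epochs object keeps a drop log. A small sketch (the log is only filled in once the epoched data has actually been loaded):

<code>
# Inspect which epochs were dropped and why (e.g., exceeding the peak-to-peak threshold).
epochs.drop_bad()             # apply the rejection criteria now rather than lazily
print(epochs.drop_log[:10])   # one entry per original event; an empty entry means it was kept
epochs.plot_drop_log();       # summary plot of drop reasons
</code>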
Now, you could decide at this point that you just want to work with the data directly as a numpy array. Luckily, that's super easy to do! We can just call `get_data()` on our epochs data structure, and this will output a matrix of `[events x channels x time points]`. If you do not limit the channel type, you will get all of them (including any EOG, stimulus channels, or other non-EEG/ECoG channels)._____no_output_____
<code>
ep_data = epochs.get_data()
print(ep_data.shape)_____no_output_____
</code>
## Plotting Epoched data
Ok... so we are getting ahead of ourselves. MNE-python provides a lot of ways to plot our data so that we don't have to deal with writing functions to do this ourselves! For example, if we'd like to plot the EEG/ECoG for all of the single trials we just loaded, along with an average across all of these trials (and channels of interest), we can do that easily with `epochs.plot_image()`._____no_output_____
<code>
epochs.plot_image(combine='mean', scalings=scalings, units=units)_____no_output_____
</code>
As before, we can choose specific channels to look at instead of looking at all of them at once. For which method do you think this would make the most difference? Why? _____no_output_____
<code>
if data_type == 'eeg':
picks = mne.pick_channels(raw.ch_names, include=['Fz','FCz','Cz','CPz','Pz'])
elif data_type == 'ecog':
picks = mne.pick_channels(raw.ch_names, include=['RPPST9','RPPST10','RPPST11'])
epochs.plot_image(picks = picks, combine='mean', scalings=scalings, units=units)_____no_output_____
</code>
We can also sort the trials, if we would like. This can be very convenient if you have reaction times or some other portion of the trial where reordering would make sense. Here, we'll just pick a channel and order by the mean activity within each trial._____no_output_____
<code>
if data_type == 'eeg':
picks = mne.pick_channels(raw.ch_names, include=['CP6'])
elif data_type == 'ecog':
picks = mne.pick_channels(raw.ch_names, include=['RPPST2'])
# Get the data as a numpy array
eps_data = epochs.get_data()
# Sort the data
new_order = eps_data[:,picks[0],:].mean(1).argsort(0)
epochs.plot_image(picks=picks, order=new_order, scalings=scalings, units=units)_____no_output_____
</code>
## Other ways to view epoched data
For EEG, another way to view these epochs by trial is using the scalp topography information. This allows us to quickly assess differences across the scalp in response to the stimuli. What do you notice about the responses?_____no_output_____
<code>
if data_type == 'eeg':
epochs.plot_topo_image(vmin=-30, vmax=30, fig_facecolor='w',font_color='k');_____no_output_____
</code>
## Comparing epochs of different trial types
So far we have just shown averages of activity across many different sentences. However, as mentioned above, the sentences come from multiple male and female talkers. So -- one quick split we could try is just to compare the responses to female vs. male talkers. This is relatively simple with the TIMIT stimuli because their file name starts with "f" or "m" to indicate this. _____no_output_____
<code>
# Make lists of the event ID numbers corresponding to "f" and "m" sentences
f_evs = []
m_evs = []
for k in event_id.keys():
if k[0] == 'f':
f_evs.append(event_id[k])
elif k[0] == 'm':
m_evs.append(event_id[k])
print(unique_stimuli)
f_evs_new = [v for v in f_evs if v in unique_stimuli]
m_evs_new = [v for v in m_evs if v in unique_stimuli]
# Epoch the data separately for "f" and "m" epochs
f_epochs = mne.Epochs(raw, events, event_id=f_evs_new, tmin=tmin, tmax=tmax, reject=reject)
m_epochs = mne.Epochs(raw, events, event_id=m_evs_new, tmin=tmin, tmax=tmax, reject=reject)_____no_output_____
</code>
Now we can plot the epochs just as we did above._____no_output_____
<code>
f_epochs.plot_image(combine='mean', show=False, scalings=scalings, units=units)
m_epochs.plot_image(combine='mean', show=False, scalings=scalings, units=units)_____no_output_____
</code>
Cool! So now we have a separate plot for the "f" and "m" talkers. However, it's not super convenient to compare the traces this way... we kind of want them on the same axis. MNE easily allows us to do this too! Instead of using the epochs, we can create `evoked` data structures, which are averaged epochs. You can [read more about evoked data structures here](https://mne.tools/dev/auto_tutorials/evoked/plot_10_evoked_overview.html).
## Compare evoked data_____no_output_____
<code>
evokeds = {'female': f_epochs.average(), 'male': m_epochs.average()}
mne.viz.plot_compare_evokeds(evokeds, show_sensors='upper right',picks=picks);_____no_output_____
</code>
If we actually want errorbars on this plot, we need to do this a bit differently. We can use the `iter_evoked()` method on our epochs structures to create a dictionary of conditions for which we will plot our comparisons with `plot_compare_evokeds`._____no_output_____
<code>
evokeds = {'f':list(f_epochs.iter_evoked()), 'm':list(m_epochs.iter_evoked())}
mne.viz.plot_compare_evokeds(evokeds, picks=picks);
_____no_output_____
</code>
## Plotting scalp topography
For EEG, another common plot you may see is a topographic map showing activity (or other data like p-values, or differences between conditions). In this example, we'll show the activity at -0.2, 0, 0.1, 0.2, 0.3, and 1 second. You can also of course choose just one time to look at._____no_output_____
<code>
if data_type == 'eeg':
times=[tmin, 0, 0.1, 0.2, 0.3, tmax]
epochs.average().plot_topomap(times, ch_type='eeg', cmap='PRGn', res=32,
outlines='skirt', time_unit='s');_____no_output_____
</code>
We can also plot arbitrary data using `mne.viz.plot_topomap`, and passing in a vector of data matching the number of EEG channels, and `raw.info` to give specifics on those channel locations._____no_output_____
<code>
if data_type == 'eeg':
chans = mne.pick_types(raw.info, eeg=True)
data = np.random.randn(len(chans),)
plt.figure()
mne.viz.plot_topomap(data, raw.info, show=True)
_____no_output_____
</code>
We can even animate these topo maps! This won't work well in jupyterhub, but feel free to try on your own!_____no_output_____
<code>
if data_type == 'eeg':
fig,anim=epochs.average().animate_topomap(blit=False, times=np.linspace(tmin, tmax, 100))_____no_output_____
</code>
## A few more fancy EEG plots
If we want to get especially fancy, we can also use `plot_joint` with our evoked data (or averaged epoched data, as shown below). This allows us to combine the ERPs for individual channels with topographic maps at time points that we specify. Pretty awesome!_____no_output_____
<code>
if data_type == 'eeg':
epochs.average().plot_joint(picks='eeg', times=[0.1, 0.2, 0.3])_____no_output_____
</code>
# What if I need more control? - matplotlib alternatives
If you feel you need more specific control over your plots, it's easy to get the data into a usable format for plotting with matplotlib. You can export both the raw and epoched data using the `get_data()` function, which will allow you to save your data as a numpy array `[ntrials x nchannels x ntimepoints]`.
Then, you can do whatever you want with the data! Throw it into matplotlib, use seaborn, or whatever your heart desires!_____no_output_____
<code>
if data_type == 'eeg':
picks = mne.pick_channels(raw.ch_names, include=['Fz','FCz','Cz','CPz','Pz'])
elif data_type == 'ecog':
picks = mne.pick_channels(raw.ch_names, include=['RPPST9','RPPST10','RPPST11'])
f_data = f_epochs.get_data(picks=picks)
m_data = m_epochs.get_data(picks=picks)
times = f_epochs.times
print(f_data.shape)_____no_output_____
</code>
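If you want to hold on to these arrays outside of MNE, you can write them straight to disk with numpy (the file names below are just placeholders):

<code>
# Hypothetical example: persist the epoched arrays for later use outside MNE.
np.save('f_epochs_data.npy', f_data)   # shape: (trials, channels, time points)
np.save('m_epochs_data.npy', m_data)
np.save('epoch_times.npy', times)
</code>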
## Plot evoked data with errorbars
We can recreate some similar plots to those in MNE-python with some of the matplotlib functions. Here we'll create something similar to what was plotted in `plot_compare_evokeds`._____no_output_____
<code>
def plot_errorbar(x, ydata, label=None, axlines=True, alpha=0.5, **kwargs):
'''
Plot the mean +/- standard error of ydata.
Inputs:
x : vector of x values
ydata : matrix of your data (this will be averaged along the 0th dimension)
label : A string containing the label for this plot
axlines : [bool], whether to draw the horizontal and vertical axes
alpha: opacity of the standard error area
'''
ymean = ydata.mean(0)
ystderr = ydata.std(0)/np.sqrt(ydata.shape[0])
plt.plot(x, ydata.mean(0), label=label, **kwargs)
plt.fill_between(x, ymean+ystderr, ymean-ystderr, alpha=alpha, **kwargs)
if axlines:
plt.axvline(0, color='k', linestyle='--')
plt.axhline(0, color='k', linestyle='--')
plt.gca().set_xlim([x.min(), x.max()])_____no_output_____plt.figure()
plot_errorbar(times, f_data.mean(0), label='female')
plot_errorbar(times, m_data.mean(0), label='male')
plt.xlabel('Time (s)')
plt.ylabel('Z-scored high gamma')
plt.legend()_____no_output_____
</code>
## ECoG Exercise:
1. If you wanted to look at each ECoG electrode individually to find which ones have responses to the speech data, how would you do this?
2. Can you plot the comparison between "f" and "m" trials for each electrode as a subplot (try using `plt.subplot()` from `matplotlib`)? One possible sketch is shown after the empty cell below._____no_output_____
<code>
# Get the data for f trials
# Get the data for m trials
# Loop through each channel, and create a set of subplots for each_____no_output_____
</code>
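One possible sketch for the exercise, written for the ECoG data and assuming the `f_epochs`/`m_epochs` objects created above (the grid size is arbitrary):

<code>
# Hypothetical sketch: compare "f" vs "m" evoked responses for every channel.
f_all = f_epochs.get_data()   # (trials, channels, time points)
m_all = m_epochs.get_data()
times = f_epochs.times
n_chans = f_all.shape[1]
n_cols = 8
n_rows = int(np.ceil(n_chans / n_cols))
plt.figure(figsize=(2 * n_cols, 2 * n_rows))
for ch in range(n_chans):
    plt.subplot(n_rows, n_cols, ch + 1)
    plt.plot(times, f_all[:, ch, :].mean(0), label='f')
    plt.plot(times, m_all[:, ch, :].mean(0), label='m')
    plt.title(f_epochs.ch_names[ch], fontsize=8)
plt.tight_layout()
</code>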
# Hooray, the End!
You did it! Go forth and use MNE-python in your own projects, or even contribute to the code! 🧠_____no_output_____
| {
"repository": "bastivkl/nh2020-curriculum",
"path": "we-visualizing-eeg-hamilton/EEG_viz_mne.ipynb",
"matched_keywords": [
"neuroscience"
],
"stars": 94,
"size": 30082,
"hexsha": "d0ddd73a99ef0ae0fc81c021acdf2942d5189d00",
"max_line_length": 711,
"avg_line_length": 38.8656330749,
"alphanum_fraction": 0.6266205704
} |
# Notebook from leemjm92/dsi15_capstone_steering_wheel_prediction
Path: .ipynb_checkpoints/02_modeling_model_5-checkpoint.ipynb
<code>
from google.colab import drive
drive.mount('/content/drive')Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly
Enter your authorization code:
··········
Mounted at /content/drive
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from numpy import load
from numpy import asarray
from numpy import savez_compressed
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.optimizers import Adam
from keras.metrics import RootMeanSquaredError
from keras.models import load_model
from keras.callbacks import *
%matplotlib inline_____no_output_____###################################### loading new data_____no_output_____camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/nedlog2_clea.csv')_____no_output_____def camera_processing(camera_file, file_name):
# camera file
camera = camera_file.f.arr_0
camera = camera.astype('float32')
camera = camera/255
camera = camera.reshape(camera.shape[0], camera.shape[1], camera.shape[2], 1)
savez_compressed(f'/content/drive/My Drive/datasets/{file_name}_train', camera)
return print('Done')_____no_output_____def log_processing(log_file, file_name):
log_file['steering_avg_radian'] = log_file['steering_avg'] * np.pi / 180
log_file.to_csv(f'/content/drive/My Drive/datasets/{file_name}_train.csv')
return print('Done')_____no_output_____camera_processing(camera2, 'camera2')_____no_output_____log_processing(log2, 'log2')Done
def train_split(camera_file_name, log_file_name):
# load camera file
X = load(f'/content/drive/My Drive/datasets/{camera_file_name}_train.npz')
X = X.f.arr_0
# load log file
log = pd.read_csv(f'/content/drive/My Drive/datasets/{log_file_name}_train.csv')
y = log['steering_avg_radian']
y = y.to_numpy()
y = y.reshape(y.shape[0], 1)
# train test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=True)
# save them into individual file doing so due to ram management
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_X_train', X_train)
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_X_test', X_test)
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_y_train', y_train)
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_y_test', y_test)
return print('Done')_____no_output_____train_split('camera2', 'log2')Done
# # log file
# log_file['steering_avg_radian'] = log_file['steering_avg'] * np.pi / 180
# y = log_file['steering_avg_radian']
# y = y.to_numpy_____no_output_____############################# end of loading new data_____no_output_____X = X.f.arr_0_____no_output_____X.shape_____no_output_____log1 = pd.read_csv('/content/drive/My Drive/log1_full.csv')_____no_output_____log1.head()_____no_output_____# convert the angle from degree to radian
log1['steering_avg_radian'] = log1['steering_avg'] * np.pi / 180_____no_output_____log1.head()_____no_output_____log1.to_csv('/content/drive/My Drive/log1_train.csv')_____no_output_____log1 = pd.read_csv('/content/drive/My Drive/log1_train.csv')_____no_output_____y = log1['steering_avg_radian']_____no_output_____y = y.to_numpy()_____no_output_____y.shape_____no_output_____y = y.reshape(y.shape[0], 1)_____no_output_____y.shape_____no_output_____from sklearn.model_selection import train_test_split_____no_output_____# split it so that the validation set is the last 20% of the dataset as I want sequential data
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=False)_____no_output_____########################### start of train with camera8 data for model 5 epochs = 30_____no_output_____camera8 = load('/content/drive/My Drive/datasets/camera8_cleaned.npz')
log8 = pd.read_csv('/content/drive/My Drive/datasets/log8_cleaned.csv')_____no_output_____camera_processing(camera8, 'camera8')Done
log_processing(log1, 'log1')Done
train_split('camera1', 'log1')Done
X_train, X_test, y_train, y_test = train_load('camera8')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____model = Sequential()
model.add(Conv2D(16, (8, 8), strides=(4, 4), activation='elu', padding="same"))
model.add(Conv2D(32, (5, 5), strides=(2, 2), activation='elu', padding="same"))
model.add(Conv2D(64, (5, 5), strides=(2, 2), padding="same"))
model.add(Flatten())
model.add(Dropout(.2))
model.add(Dense(512, activation='elu'))
model.add(Dropout(.5))
model.add(Dense(1))
model.compile(loss='mse', optimizer=Adam(lr=1e-04), metrics=[RootMeanSquaredError()])_____no_output_____filepath = "/content/drive/My Drive/epochs/model_2_1_camera8.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
347/347 [==============================] - ETA: 0s - loss: 0.3885 - root_mean_squared_error: 0.6233
Epoch 00001: val_loss improved from inf to 0.35014, saving model to /content/drive/My Drive/epochs/model_5_1_camera8.0001-0.3501.h5
347/347 [==============================] - 2s 7ms/step - loss: 0.3885 - root_mean_squared_error: 0.6233 - val_loss: 0.3501 - val_root_mean_squared_error: 0.5917
Epoch 2/30
344/347 [============================>.] - ETA: 0s - loss: 0.3511 - root_mean_squared_error: 0.5925
Epoch 00002: val_loss improved from 0.35014 to 0.33945, saving model to /content/drive/My Drive/epochs/model_5_1_camera8.0002-0.3395.h5
347/347 [==============================] - 2s 6ms/step - loss: 0.3530 - root_mean_squared_error: 0.5942 - val_loss: 0.3395 - val_root_mean_squared_error: 0.5826
Epoch 3/30
345/347 [============================>.] - ETA: 0s - loss: 0.3234 - root_mean_squared_error: 0.5687
Epoch 00003: val_loss improved from 0.33945 to 0.33724, saving model to /content/drive/My Drive/epochs/model_5_1_camera8.0003-0.3372.h5
347/347 [==============================] - 2s 6ms/step - loss: 0.3250 - root_mean_squared_error: 0.5701 - val_loss: 0.3372 - val_root_mean_squared_error: 0.5807
Epoch 4/30
338/347 [============================>.] - ETA: 0s - loss: 0.3078 - root_mean_squared_error: 0.5548
Epoch 00004: val_loss improved from 0.33724 to 0.33642, saving model to /content/drive/My Drive/epochs/model_5_1_camera8.0004-0.3364.h5
347/347 [==============================] - 2s 7ms/step - loss: 0.3104 - root_mean_squared_error: 0.5572 - val_loss: 0.3364 - val_root_mean_squared_error: 0.5800
Epoch 5/30
340/347 [============================>.] - ETA: 0s - loss: 0.2960 - root_mean_squared_error: 0.5440
Epoch 00005: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2966 - root_mean_squared_error: 0.5446 - val_loss: 0.3365 - val_root_mean_squared_error: 0.5801
Epoch 6/30
340/347 [============================>.] - ETA: 0s - loss: 0.2852 - root_mean_squared_error: 0.5340
Epoch 00006: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2837 - root_mean_squared_error: 0.5326 - val_loss: 0.3387 - val_root_mean_squared_error: 0.5820
Epoch 7/30
343/347 [============================>.] - ETA: 0s - loss: 0.2760 - root_mean_squared_error: 0.5254
Epoch 00007: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2765 - root_mean_squared_error: 0.5258 - val_loss: 0.3412 - val_root_mean_squared_error: 0.5842
Epoch 8/30
341/347 [============================>.] - ETA: 0s - loss: 0.2689 - root_mean_squared_error: 0.5185
Epoch 00008: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2699 - root_mean_squared_error: 0.5195 - val_loss: 0.3443 - val_root_mean_squared_error: 0.5868
Epoch 9/30
344/347 [============================>.] - ETA: 0s - loss: 0.2649 - root_mean_squared_error: 0.5147
Epoch 00009: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2642 - root_mean_squared_error: 0.5140 - val_loss: 0.3437 - val_root_mean_squared_error: 0.5862
Epoch 10/30
344/347 [============================>.] - ETA: 0s - loss: 0.2584 - root_mean_squared_error: 0.5083
Epoch 00010: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2577 - root_mean_squared_error: 0.5076 - val_loss: 0.3449 - val_root_mean_squared_error: 0.5872
Epoch 11/30
344/347 [============================>.] - ETA: 0s - loss: 0.2544 - root_mean_squared_error: 0.5043
Epoch 00011: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2539 - root_mean_squared_error: 0.5039 - val_loss: 0.3474 - val_root_mean_squared_error: 0.5894
Epoch 12/30
347/347 [==============================] - ETA: 0s - loss: 0.2496 - root_mean_squared_error: 0.4996
Epoch 00012: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2496 - root_mean_squared_error: 0.4996 - val_loss: 0.3487 - val_root_mean_squared_error: 0.5905
Epoch 13/30
343/347 [============================>.] - ETA: 0s - loss: 0.2446 - root_mean_squared_error: 0.4945
Epoch 00013: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2449 - root_mean_squared_error: 0.4949 - val_loss: 0.3463 - val_root_mean_squared_error: 0.5884
Epoch 14/30
344/347 [============================>.] - ETA: 0s - loss: 0.2423 - root_mean_squared_error: 0.4923
Epoch 00014: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2418 - root_mean_squared_error: 0.4917 - val_loss: 0.3542 - val_root_mean_squared_error: 0.5951
Epoch 15/30
340/347 [============================>.] - ETA: 0s - loss: 0.2410 - root_mean_squared_error: 0.4909
Epoch 00015: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2398 - root_mean_squared_error: 0.4897 - val_loss: 0.3555 - val_root_mean_squared_error: 0.5962
Epoch 16/30
340/347 [============================>.] - ETA: 0s - loss: 0.2349 - root_mean_squared_error: 0.4847
Epoch 00016: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2346 - root_mean_squared_error: 0.4843 - val_loss: 0.3555 - val_root_mean_squared_error: 0.5962
Epoch 17/30
346/347 [============================>.] - ETA: 0s - loss: 0.2315 - root_mean_squared_error: 0.4812
Epoch 00017: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2314 - root_mean_squared_error: 0.4811 - val_loss: 0.3629 - val_root_mean_squared_error: 0.6024
Epoch 18/30
342/347 [============================>.] - ETA: 0s - loss: 0.2290 - root_mean_squared_error: 0.4785
Epoch 00018: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2279 - root_mean_squared_error: 0.4773 - val_loss: 0.3644 - val_root_mean_squared_error: 0.6036
Epoch 19/30
342/347 [============================>.] - ETA: 0s - loss: 0.2244 - root_mean_squared_error: 0.4737
Epoch 00019: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2241 - root_mean_squared_error: 0.4734 - val_loss: 0.3732 - val_root_mean_squared_error: 0.6109
Epoch 20/30
343/347 [============================>.] - ETA: 0s - loss: 0.2229 - root_mean_squared_error: 0.4721
Epoch 00020: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2230 - root_mean_squared_error: 0.4723 - val_loss: 0.3757 - val_root_mean_squared_error: 0.6129
Epoch 21/30
346/347 [============================>.] - ETA: 0s - loss: 0.2226 - root_mean_squared_error: 0.4718
Epoch 00021: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2223 - root_mean_squared_error: 0.4715 - val_loss: 0.3742 - val_root_mean_squared_error: 0.6118
Epoch 22/30
343/347 [============================>.] - ETA: 0s - loss: 0.2182 - root_mean_squared_error: 0.4672
Epoch 00022: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2178 - root_mean_squared_error: 0.4666 - val_loss: 0.3794 - val_root_mean_squared_error: 0.6159
Epoch 23/30
343/347 [============================>.] - ETA: 0s - loss: 0.2170 - root_mean_squared_error: 0.4658
Epoch 00023: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2175 - root_mean_squared_error: 0.4664 - val_loss: 0.3792 - val_root_mean_squared_error: 0.6158
Epoch 24/30
340/347 [============================>.] - ETA: 0s - loss: 0.2145 - root_mean_squared_error: 0.4631
Epoch 00024: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2135 - root_mean_squared_error: 0.4620 - val_loss: 0.3804 - val_root_mean_squared_error: 0.6168
Epoch 25/30
341/347 [============================>.] - ETA: 0s - loss: 0.2097 - root_mean_squared_error: 0.4579
Epoch 00025: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2102 - root_mean_squared_error: 0.4585 - val_loss: 0.3917 - val_root_mean_squared_error: 0.6258
Epoch 26/30
343/347 [============================>.] - ETA: 0s - loss: 0.2090 - root_mean_squared_error: 0.4571
Epoch 00026: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 6ms/step - loss: 0.2085 - root_mean_squared_error: 0.4567 - val_loss: 0.3987 - val_root_mean_squared_error: 0.6314
Epoch 27/30
338/347 [============================>.] - ETA: 0s - loss: 0.2043 - root_mean_squared_error: 0.4519
Epoch 00027: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 7ms/step - loss: 0.2067 - root_mean_squared_error: 0.4546 - val_loss: 0.3909 - val_root_mean_squared_error: 0.6252
Epoch 28/30
341/347 [============================>.] - ETA: 0s - loss: 0.2044 - root_mean_squared_error: 0.4521
Epoch 00028: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 7ms/step - loss: 0.2048 - root_mean_squared_error: 0.4525 - val_loss: 0.3991 - val_root_mean_squared_error: 0.6317
Epoch 29/30
342/347 [============================>.] - ETA: 0s - loss: 0.2035 - root_mean_squared_error: 0.4512
Epoch 00029: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 7ms/step - loss: 0.2038 - root_mean_squared_error: 0.4514 - val_loss: 0.4025 - val_root_mean_squared_error: 0.6345
Epoch 30/30
345/347 [============================>.] - ETA: 0s - loss: 0.2001 - root_mean_squared_error: 0.4473
Epoch 00030: val_loss did not improve from 0.33642
347/347 [==============================] - 2s 7ms/step - loss: 0.2002 - root_mean_squared_error: 0.4474 - val_loss: 0.3944 - val_root_mean_squared_error: 0.6280
model_2_camera8 = model_history('model_2_1_camera8')_____no_output_____model_3_camera8.head()_____no_output_____#################### end of training camera9 data for model 3_____no_output_____########################### continue training with camera1 data for model 3_____no_output_____from keras.models import load_model_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_1_camera8.0004-0.3364.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera1')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____from keras.callbacks import *
filepath = "/content/drive/My Drive/epochs/model_5_2_camera1.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
641/641 [==============================] - ETA: 0s - loss: 0.2590 - root_mean_squared_error: 0.5089
Epoch 00001: val_loss improved from inf to 0.22856, saving model to /content/drive/My Drive/epochs/model_5_2_camera1.0001-0.2286.h5
641/641 [==============================] - 5s 8ms/step - loss: 0.2590 - root_mean_squared_error: 0.5089 - val_loss: 0.2286 - val_root_mean_squared_error: 0.4781
Epoch 2/30
640/641 [============================>.] - ETA: 0s - loss: 0.2440 - root_mean_squared_error: 0.4940
Epoch 00002: val_loss improved from 0.22856 to 0.22818, saving model to /content/drive/My Drive/epochs/model_5_2_camera1.0002-0.2282.h5
641/641 [==============================] - 4s 6ms/step - loss: 0.2438 - root_mean_squared_error: 0.4938 - val_loss: 0.2282 - val_root_mean_squared_error: 0.4777
Epoch 3/30
639/641 [============================>.] - ETA: 0s - loss: 0.2380 - root_mean_squared_error: 0.4879
Epoch 00003: val_loss improved from 0.22818 to 0.22266, saving model to /content/drive/My Drive/epochs/model_5_2_camera1.0003-0.2227.h5
641/641 [==============================] - 4s 6ms/step - loss: 0.2378 - root_mean_squared_error: 0.4876 - val_loss: 0.2227 - val_root_mean_squared_error: 0.4719
Epoch 4/30
639/641 [============================>.] - ETA: 0s - loss: 0.2328 - root_mean_squared_error: 0.4825
Epoch 00004: val_loss improved from 0.22266 to 0.22231, saving model to /content/drive/My Drive/epochs/model_5_2_camera1.0004-0.2223.h5
641/641 [==============================] - 4s 6ms/step - loss: 0.2323 - root_mean_squared_error: 0.4819 - val_loss: 0.2223 - val_root_mean_squared_error: 0.4715
Epoch 5/30
636/641 [============================>.] - ETA: 0s - loss: 0.2293 - root_mean_squared_error: 0.4789
Epoch 00005: val_loss improved from 0.22231 to 0.22180, saving model to /content/drive/My Drive/epochs/model_5_2_camera1.0005-0.2218.h5
641/641 [==============================] - 4s 6ms/step - loss: 0.2291 - root_mean_squared_error: 0.4787 - val_loss: 0.2218 - val_root_mean_squared_error: 0.4710
Epoch 6/30
640/641 [============================>.] - ETA: 0s - loss: 0.2259 - root_mean_squared_error: 0.4753
Epoch 00006: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2257 - root_mean_squared_error: 0.4751 - val_loss: 0.2239 - val_root_mean_squared_error: 0.4732
Epoch 7/30
636/641 [============================>.] - ETA: 0s - loss: 0.2243 - root_mean_squared_error: 0.4736
Epoch 00007: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2231 - root_mean_squared_error: 0.4723 - val_loss: 0.2221 - val_root_mean_squared_error: 0.4713
Epoch 8/30
639/641 [============================>.] - ETA: 0s - loss: 0.2193 - root_mean_squared_error: 0.4683
Epoch 00008: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2193 - root_mean_squared_error: 0.4683 - val_loss: 0.2240 - val_root_mean_squared_error: 0.4732
Epoch 9/30
640/641 [============================>.] - ETA: 0s - loss: 0.2181 - root_mean_squared_error: 0.4671
Epoch 00009: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2179 - root_mean_squared_error: 0.4668 - val_loss: 0.2257 - val_root_mean_squared_error: 0.4751
Epoch 10/30
634/641 [============================>.] - ETA: 0s - loss: 0.2147 - root_mean_squared_error: 0.4633
Epoch 00010: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2152 - root_mean_squared_error: 0.4639 - val_loss: 0.2244 - val_root_mean_squared_error: 0.4737
Epoch 11/30
637/641 [============================>.] - ETA: 0s - loss: 0.2133 - root_mean_squared_error: 0.4619
Epoch 00011: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2132 - root_mean_squared_error: 0.4617 - val_loss: 0.2264 - val_root_mean_squared_error: 0.4758
Epoch 12/30
635/641 [============================>.] - ETA: 0s - loss: 0.2100 - root_mean_squared_error: 0.4583
Epoch 00012: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2112 - root_mean_squared_error: 0.4596 - val_loss: 0.2243 - val_root_mean_squared_error: 0.4736
Epoch 13/30
637/641 [============================>.] - ETA: 0s - loss: 0.2086 - root_mean_squared_error: 0.4568
Epoch 00013: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 7ms/step - loss: 0.2098 - root_mean_squared_error: 0.4580 - val_loss: 0.2318 - val_root_mean_squared_error: 0.4815
Epoch 14/30
641/641 [==============================] - ETA: 0s - loss: 0.2076 - root_mean_squared_error: 0.4557
Epoch 00014: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2076 - root_mean_squared_error: 0.4557 - val_loss: 0.2272 - val_root_mean_squared_error: 0.4766
Epoch 15/30
632/641 [============================>.] - ETA: 0s - loss: 0.2059 - root_mean_squared_error: 0.4537
Epoch 00015: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2053 - root_mean_squared_error: 0.4531 - val_loss: 0.2304 - val_root_mean_squared_error: 0.4800
Epoch 16/30
636/641 [============================>.] - ETA: 0s - loss: 0.2047 - root_mean_squared_error: 0.4525
Epoch 00016: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2046 - root_mean_squared_error: 0.4523 - val_loss: 0.2326 - val_root_mean_squared_error: 0.4823
Epoch 17/30
640/641 [============================>.] - ETA: 0s - loss: 0.2035 - root_mean_squared_error: 0.4511
Epoch 00017: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2035 - root_mean_squared_error: 0.4511 - val_loss: 0.2286 - val_root_mean_squared_error: 0.4781
Epoch 18/30
632/641 [============================>.] - ETA: 0s - loss: 0.2024 - root_mean_squared_error: 0.4498
Epoch 00018: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2023 - root_mean_squared_error: 0.4498 - val_loss: 0.2285 - val_root_mean_squared_error: 0.4780
Epoch 19/30
632/641 [============================>.] - ETA: 0s - loss: 0.2009 - root_mean_squared_error: 0.4482
Epoch 00019: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.2008 - root_mean_squared_error: 0.4481 - val_loss: 0.2319 - val_root_mean_squared_error: 0.4816
Epoch 20/30
640/641 [============================>.] - ETA: 0s - loss: 0.1993 - root_mean_squared_error: 0.4465
Epoch 00020: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1995 - root_mean_squared_error: 0.4467 - val_loss: 0.2303 - val_root_mean_squared_error: 0.4799
Epoch 21/30
634/641 [============================>.] - ETA: 0s - loss: 0.1970 - root_mean_squared_error: 0.4438
Epoch 00021: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1979 - root_mean_squared_error: 0.4448 - val_loss: 0.2317 - val_root_mean_squared_error: 0.4814
Epoch 22/30
636/641 [============================>.] - ETA: 0s - loss: 0.1982 - root_mean_squared_error: 0.4452
Epoch 00022: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1974 - root_mean_squared_error: 0.4443 - val_loss: 0.2306 - val_root_mean_squared_error: 0.4803
Epoch 23/30
639/641 [============================>.] - ETA: 0s - loss: 0.1953 - root_mean_squared_error: 0.4420
Epoch 00023: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1953 - root_mean_squared_error: 0.4419 - val_loss: 0.2341 - val_root_mean_squared_error: 0.4838
Epoch 24/30
633/641 [============================>.] - ETA: 0s - loss: 0.1937 - root_mean_squared_error: 0.4401
Epoch 00024: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1944 - root_mean_squared_error: 0.4409 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 25/30
637/641 [============================>.] - ETA: 0s - loss: 0.1934 - root_mean_squared_error: 0.4398
Epoch 00025: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1935 - root_mean_squared_error: 0.4399 - val_loss: 0.2346 - val_root_mean_squared_error: 0.4844
Epoch 26/30
634/641 [============================>.] - ETA: 0s - loss: 0.1925 - root_mean_squared_error: 0.4387
Epoch 00026: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1920 - root_mean_squared_error: 0.4382 - val_loss: 0.2371 - val_root_mean_squared_error: 0.4869
Epoch 27/30
637/641 [============================>.] - ETA: 0s - loss: 0.1928 - root_mean_squared_error: 0.4391
Epoch 00027: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1921 - root_mean_squared_error: 0.4383 - val_loss: 0.2362 - val_root_mean_squared_error: 0.4860
Epoch 28/30
633/641 [============================>.] - ETA: 0s - loss: 0.1905 - root_mean_squared_error: 0.4364
Epoch 00028: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1904 - root_mean_squared_error: 0.4363 - val_loss: 0.2396 - val_root_mean_squared_error: 0.4895
Epoch 29/30
640/641 [============================>.] - ETA: 0s - loss: 0.1897 - root_mean_squared_error: 0.4355
Epoch 00029: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1896 - root_mean_squared_error: 0.4354 - val_loss: 0.2357 - val_root_mean_squared_error: 0.4855
Epoch 30/30
637/641 [============================>.] - ETA: 0s - loss: 0.1899 - root_mean_squared_error: 0.4358
Epoch 00030: val_loss did not improve from 0.22180
641/641 [==============================] - 4s 6ms/step - loss: 0.1893 - root_mean_squared_error: 0.4350 - val_loss: 0.2410 - val_root_mean_squared_error: 0.4909
ticks = [i for i in range(0, 31, 5)]
labels = [i for i in range(0, 31, 5)]
labels[0] = 1_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera1', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model4_2_camera1.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera1 = model_history('model_4_2_camera1')_____no_output_____########################### end of train with camera1 data for model 3_____no_output_____########################### star of train with camera9 data for model 3_____no_output_____camera3 = load('/content/drive/My Drive/datasets/camera3_cleaned.npz')
log3 = pd.read_csv('/content/drive/My Drive/datasets/log3_cleaned.csv')_____no_output_____camera_processing(camera3, 'camera3')Done
log_processing(log3, 'log3')Done
def train_split(camera_file_name, log_file_name):
# load camera file
X = load(f'/content/drive/My Drive/datasets/{camera_file_name}_train.npz')
X = X.f.arr_0
# load log file
log = pd.read_csv(f'/content/drive/My Drive/datasets/{log_file_name}_train.csv')
y = log['steering_avg_radian']
y = y.to_numpy()
y = y.reshape(y.shape[0], 1)
# train test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=True)
# save them into individual file doing so due to ram management
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_X_train', X_train)
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_X_test', X_test)
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_y_train', y_train)
savez_compressed(f'/content/drive/My Drive/datasets/{camera_file_name}_y_test', y_test)
return print('Done')_____no_output_____train_split('camera3', 'log3')Done
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____from keras.models import load_model_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_2_camera1.0006-0.2219.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera9')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____from keras.callbacks import *
filepath = "/content/drive/My Drive/epochs/model_5_3_camera9.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
599/599 [==============================] - ETA: 0s - loss: 0.0626 - root_mean_squared_error: 0.2503
Epoch 00001: val_loss improved from inf to 0.05617, saving model to /content/drive/My Drive/epochs/model_5_3_camera9.0001-0.0562.h5
599/599 [==============================] - 4s 7ms/step - loss: 0.0626 - root_mean_squared_error: 0.2503 - val_loss: 0.0562 - val_root_mean_squared_error: 0.2370
Epoch 2/30
598/599 [============================>.] - ETA: 0s - loss: 0.0539 - root_mean_squared_error: 0.2321
Epoch 00002: val_loss improved from 0.05617 to 0.05341, saving model to /content/drive/My Drive/epochs/model_5_3_camera9.0002-0.0534.h5
599/599 [==============================] - 4s 6ms/step - loss: 0.0538 - root_mean_squared_error: 0.2320 - val_loss: 0.0534 - val_root_mean_squared_error: 0.2311
Epoch 3/30
597/599 [============================>.] - ETA: 0s - loss: 0.0510 - root_mean_squared_error: 0.2258
Epoch 00003: val_loss improved from 0.05341 to 0.05258, saving model to /content/drive/My Drive/epochs/model_5_3_camera9.0003-0.0526.h5
599/599 [==============================] - 4s 7ms/step - loss: 0.0513 - root_mean_squared_error: 0.2265 - val_loss: 0.0526 - val_root_mean_squared_error: 0.2293
Epoch 4/30
596/599 [============================>.] - ETA: 0s - loss: 0.0497 - root_mean_squared_error: 0.2229
Epoch 00004: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0498 - root_mean_squared_error: 0.2231 - val_loss: 0.0528 - val_root_mean_squared_error: 0.2298
Epoch 5/30
591/599 [============================>.] - ETA: 0s - loss: 0.0480 - root_mean_squared_error: 0.2191
Epoch 00005: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0485 - root_mean_squared_error: 0.2202 - val_loss: 0.0530 - val_root_mean_squared_error: 0.2303
Epoch 6/30
598/599 [============================>.] - ETA: 0s - loss: 0.0475 - root_mean_squared_error: 0.2179
Epoch 00006: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0475 - root_mean_squared_error: 0.2178 - val_loss: 0.0534 - val_root_mean_squared_error: 0.2311
Epoch 7/30
593/599 [============================>.] - ETA: 0s - loss: 0.0461 - root_mean_squared_error: 0.2147
Epoch 00007: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0462 - root_mean_squared_error: 0.2149 - val_loss: 0.0529 - val_root_mean_squared_error: 0.2299
Epoch 8/30
597/599 [============================>.] - ETA: 0s - loss: 0.0456 - root_mean_squared_error: 0.2134
Epoch 00008: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0455 - root_mean_squared_error: 0.2133 - val_loss: 0.0531 - val_root_mean_squared_error: 0.2304
Epoch 9/30
594/599 [============================>.] - ETA: 0s - loss: 0.0451 - root_mean_squared_error: 0.2125
Epoch 00009: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0450 - root_mean_squared_error: 0.2121 - val_loss: 0.0543 - val_root_mean_squared_error: 0.2330
Epoch 10/30
594/599 [============================>.] - ETA: 0s - loss: 0.0446 - root_mean_squared_error: 0.2112
Epoch 00010: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0444 - root_mean_squared_error: 0.2106 - val_loss: 0.0538 - val_root_mean_squared_error: 0.2320
Epoch 11/30
594/599 [============================>.] - ETA: 0s - loss: 0.0433 - root_mean_squared_error: 0.2081
Epoch 00011: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0435 - root_mean_squared_error: 0.2085 - val_loss: 0.0551 - val_root_mean_squared_error: 0.2347
Epoch 12/30
594/599 [============================>.] - ETA: 0s - loss: 0.0428 - root_mean_squared_error: 0.2070
Epoch 00012: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0431 - root_mean_squared_error: 0.2076 - val_loss: 0.0546 - val_root_mean_squared_error: 0.2337
Epoch 13/30
596/599 [============================>.] - ETA: 0s - loss: 0.0425 - root_mean_squared_error: 0.2062
Epoch 00013: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0424 - root_mean_squared_error: 0.2060 - val_loss: 0.0548 - val_root_mean_squared_error: 0.2341
Epoch 14/30
595/599 [============================>.] - ETA: 0s - loss: 0.0413 - root_mean_squared_error: 0.2032
Epoch 00014: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0417 - root_mean_squared_error: 0.2042 - val_loss: 0.0558 - val_root_mean_squared_error: 0.2363
Epoch 15/30
596/599 [============================>.] - ETA: 0s - loss: 0.0416 - root_mean_squared_error: 0.2039
Epoch 00015: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0415 - root_mean_squared_error: 0.2037 - val_loss: 0.0554 - val_root_mean_squared_error: 0.2353
Epoch 16/30
594/599 [============================>.] - ETA: 0s - loss: 0.0408 - root_mean_squared_error: 0.2021
Epoch 00016: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0408 - root_mean_squared_error: 0.2019 - val_loss: 0.0560 - val_root_mean_squared_error: 0.2367
Epoch 17/30
594/599 [============================>.] - ETA: 0s - loss: 0.0409 - root_mean_squared_error: 0.2021
Epoch 00017: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0408 - root_mean_squared_error: 0.2020 - val_loss: 0.0548 - val_root_mean_squared_error: 0.2341
Epoch 18/30
597/599 [============================>.] - ETA: 0s - loss: 0.0398 - root_mean_squared_error: 0.1994
Epoch 00018: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0398 - root_mean_squared_error: 0.1994 - val_loss: 0.0561 - val_root_mean_squared_error: 0.2369
Epoch 19/30
593/599 [============================>.] - ETA: 0s - loss: 0.0394 - root_mean_squared_error: 0.1985
Epoch 00019: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0395 - root_mean_squared_error: 0.1988 - val_loss: 0.0574 - val_root_mean_squared_error: 0.2395
Epoch 20/30
599/599 [==============================] - ETA: 0s - loss: 0.0392 - root_mean_squared_error: 0.1980
Epoch 00020: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0392 - root_mean_squared_error: 0.1980 - val_loss: 0.0578 - val_root_mean_squared_error: 0.2405
Epoch 21/30
595/599 [============================>.] - ETA: 0s - loss: 0.0393 - root_mean_squared_error: 0.1982
Epoch 00021: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0393 - root_mean_squared_error: 0.1984 - val_loss: 0.0577 - val_root_mean_squared_error: 0.2403
Epoch 22/30
596/599 [============================>.] - ETA: 0s - loss: 0.0390 - root_mean_squared_error: 0.1974
Epoch 00022: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0388 - root_mean_squared_error: 0.1971 - val_loss: 0.0571 - val_root_mean_squared_error: 0.2390
Epoch 23/30
594/599 [============================>.] - ETA: 0s - loss: 0.0387 - root_mean_squared_error: 0.1966
Epoch 00023: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0385 - root_mean_squared_error: 0.1962 - val_loss: 0.0570 - val_root_mean_squared_error: 0.2388
Epoch 24/30
596/599 [============================>.] - ETA: 0s - loss: 0.0382 - root_mean_squared_error: 0.1954
Epoch 00024: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0381 - root_mean_squared_error: 0.1952 - val_loss: 0.0588 - val_root_mean_squared_error: 0.2425
Epoch 25/30
594/599 [============================>.] - ETA: 0s - loss: 0.0377 - root_mean_squared_error: 0.1941
Epoch 00025: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0377 - root_mean_squared_error: 0.1941 - val_loss: 0.0590 - val_root_mean_squared_error: 0.2430
Epoch 26/30
594/599 [============================>.] - ETA: 0s - loss: 0.0374 - root_mean_squared_error: 0.1935
Epoch 00026: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0374 - root_mean_squared_error: 0.1934 - val_loss: 0.0590 - val_root_mean_squared_error: 0.2428
Epoch 27/30
596/599 [============================>.] - ETA: 0s - loss: 0.0371 - root_mean_squared_error: 0.1926
Epoch 00027: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0372 - root_mean_squared_error: 0.1929 - val_loss: 0.0596 - val_root_mean_squared_error: 0.2442
Epoch 28/30
590/599 [============================>.] - ETA: 0s - loss: 0.0369 - root_mean_squared_error: 0.1920
Epoch 00028: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0372 - root_mean_squared_error: 0.1928 - val_loss: 0.0594 - val_root_mean_squared_error: 0.2437
Epoch 29/30
591/599 [============================>.] - ETA: 0s - loss: 0.0368 - root_mean_squared_error: 0.1918
Epoch 00029: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0366 - root_mean_squared_error: 0.1913 - val_loss: 0.0595 - val_root_mean_squared_error: 0.2439
Epoch 30/30
593/599 [============================>.] - ETA: 0s - loss: 0.0364 - root_mean_squared_error: 0.1908
Epoch 00030: val_loss did not improve from 0.05258
599/599 [==============================] - 4s 6ms/step - loss: 0.0364 - root_mean_squared_error: 0.1907 - val_loss: 0.0600 - val_root_mean_squared_error: 0.2450
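# Not in the original notebook: a minimal sketch (assuming the same `history` object
# plus numpy/matplotlib imports) that derives the epoch axis from the run length
# instead of hand-maintaining the ten-entry ticks/labels lists used in the cells below.
import numpy as np
import matplotlib.pyplot as plt

n_epochs = len(history.history['loss'])          # 30 for the runs in this notebook
epochs = np.arange(1, n_epochs + 1)              # 1-based epoch numbers for the x-axis

plt.figure(figsize=(20, 8))
plt.plot(epochs, history.history['loss'], label='Training Loss', color='#185fad')
plt.plot(epochs, history.history['val_loss'], label='Testing Loss', color='orange')
plt.xticks(epochs[::5])                          # one tick every 5 epochs, however long the run
plt.xlabel('Epoch', fontsize=18)
plt.ylabel('Mean Squared Error', fontsize=18)
plt.legend(fontsize=18)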
ticks = [i for i in range(10)]
labels = [i for i in range(1, 11)]_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera9', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_3_camera9.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera9 = model_history('model_5_3_camera9')_____no_output_____model_3_camera9.head()_____no_output_____#################### end of training camera9 data for model 3_____no_output_____########################### start of train with camera2 data for model 3_____no_output_____camera4 = load('/content/drive/My Drive/datasets/camera4_cleaned.npz')
log4 = pd.read_csv('/content/drive/My Drive/datasets/log4_cleaned.csv')_____no_output_____camera_processing(camera4, 'camera4')Done
log_processing(log4, 'log4')Done
train_split('camera4', 'log4')Done
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
camera_processing(camera2, 'camera2')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_3_camera9.0003-0.0526.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera2')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____filepath = "/content/drive/My Drive/epochs/model_5_4_camera2.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
702/703 [============================>.] - ETA: 0s - loss: 0.0459 - root_mean_squared_error: 0.2142
Epoch 00001: val_loss improved from inf to 0.03861, saving model to /content/drive/My Drive/epochs/model_5_4_camera2.0001-0.0386.h5
703/703 [==============================] - 5s 7ms/step - loss: 0.0459 - root_mean_squared_error: 0.2143 - val_loss: 0.0386 - val_root_mean_squared_error: 0.1965
Epoch 2/30
694/703 [============================>.] - ETA: 0s - loss: 0.0429 - root_mean_squared_error: 0.2072
Epoch 00002: val_loss improved from 0.03861 to 0.03833, saving model to /content/drive/My Drive/epochs/model_5_4_camera2.0002-0.0383.h5
703/703 [==============================] - 4s 6ms/step - loss: 0.0429 - root_mean_squared_error: 0.2070 - val_loss: 0.0383 - val_root_mean_squared_error: 0.1958
Epoch 3/30
696/703 [============================>.] - ETA: 0s - loss: 0.0416 - root_mean_squared_error: 0.2039
Epoch 00003: val_loss improved from 0.03833 to 0.03827, saving model to /content/drive/My Drive/epochs/model_5_4_camera2.0003-0.0383.h5
703/703 [==============================] - 4s 6ms/step - loss: 0.0413 - root_mean_squared_error: 0.2033 - val_loss: 0.0383 - val_root_mean_squared_error: 0.1956
Epoch 4/30
699/703 [============================>.] - ETA: 0s - loss: 0.0405 - root_mean_squared_error: 0.2012
Epoch 00004: val_loss improved from 0.03827 to 0.03820, saving model to /content/drive/My Drive/epochs/model_5_4_camera2.0004-0.0382.h5
703/703 [==============================] - 5s 7ms/step - loss: 0.0405 - root_mean_squared_error: 0.2012 - val_loss: 0.0382 - val_root_mean_squared_error: 0.1955
Epoch 5/30
694/703 [============================>.] - ETA: 0s - loss: 0.0396 - root_mean_squared_error: 0.1990
Epoch 00005: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0395 - root_mean_squared_error: 0.1987 - val_loss: 0.0391 - val_root_mean_squared_error: 0.1976
Epoch 6/30
700/703 [============================>.] - ETA: 0s - loss: 0.0388 - root_mean_squared_error: 0.1970
Epoch 00006: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0387 - root_mean_squared_error: 0.1968 - val_loss: 0.0396 - val_root_mean_squared_error: 0.1991
Epoch 7/30
698/703 [============================>.] - ETA: 0s - loss: 0.0380 - root_mean_squared_error: 0.1951
Epoch 00007: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0382 - root_mean_squared_error: 0.1954 - val_loss: 0.0400 - val_root_mean_squared_error: 0.2000
Epoch 8/30
694/703 [============================>.] - ETA: 0s - loss: 0.0372 - root_mean_squared_error: 0.1929
Epoch 00008: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0377 - root_mean_squared_error: 0.1941 - val_loss: 0.0396 - val_root_mean_squared_error: 0.1990
Epoch 9/30
701/703 [============================>.] - ETA: 0s - loss: 0.0366 - root_mean_squared_error: 0.1913
Epoch 00009: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0370 - root_mean_squared_error: 0.1924 - val_loss: 0.0401 - val_root_mean_squared_error: 0.2003
Epoch 10/30
701/703 [============================>.] - ETA: 0s - loss: 0.0361 - root_mean_squared_error: 0.1900
Epoch 00010: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0363 - root_mean_squared_error: 0.1905 - val_loss: 0.0404 - val_root_mean_squared_error: 0.2009
Epoch 11/30
695/703 [============================>.] - ETA: 0s - loss: 0.0358 - root_mean_squared_error: 0.1891
Epoch 00011: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0358 - root_mean_squared_error: 0.1892 - val_loss: 0.0417 - val_root_mean_squared_error: 0.2041
Epoch 12/30
700/703 [============================>.] - ETA: 0s - loss: 0.0356 - root_mean_squared_error: 0.1886
Epoch 00012: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0355 - root_mean_squared_error: 0.1885 - val_loss: 0.0414 - val_root_mean_squared_error: 0.2034
Epoch 13/30
695/703 [============================>.] - ETA: 0s - loss: 0.0345 - root_mean_squared_error: 0.1858
Epoch 00013: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0349 - root_mean_squared_error: 0.1869 - val_loss: 0.0425 - val_root_mean_squared_error: 0.2062
Epoch 14/30
699/703 [============================>.] - ETA: 0s - loss: 0.0347 - root_mean_squared_error: 0.1862
Epoch 00014: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0346 - root_mean_squared_error: 0.1860 - val_loss: 0.0434 - val_root_mean_squared_error: 0.2084
Epoch 15/30
698/703 [============================>.] - ETA: 0s - loss: 0.0343 - root_mean_squared_error: 0.1853
Epoch 00015: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0342 - root_mean_squared_error: 0.1849 - val_loss: 0.0431 - val_root_mean_squared_error: 0.2076
Epoch 16/30
696/703 [============================>.] - ETA: 0s - loss: 0.0339 - root_mean_squared_error: 0.1841
Epoch 00016: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0338 - root_mean_squared_error: 0.1839 - val_loss: 0.0438 - val_root_mean_squared_error: 0.2092
Epoch 17/30
698/703 [============================>.] - ETA: 0s - loss: 0.0334 - root_mean_squared_error: 0.1827
Epoch 00017: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0335 - root_mean_squared_error: 0.1829 - val_loss: 0.0448 - val_root_mean_squared_error: 0.2117
Epoch 18/30
699/703 [============================>.] - ETA: 0s - loss: 0.0330 - root_mean_squared_error: 0.1816
Epoch 00018: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0330 - root_mean_squared_error: 0.1817 - val_loss: 0.0444 - val_root_mean_squared_error: 0.2106
Epoch 19/30
699/703 [============================>.] - ETA: 0s - loss: 0.0327 - root_mean_squared_error: 0.1809
Epoch 00019: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0328 - root_mean_squared_error: 0.1811 - val_loss: 0.0442 - val_root_mean_squared_error: 0.2102
Epoch 20/30
700/703 [============================>.] - ETA: 0s - loss: 0.0325 - root_mean_squared_error: 0.1802
Epoch 00020: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0324 - root_mean_squared_error: 0.1801 - val_loss: 0.0452 - val_root_mean_squared_error: 0.2126
Epoch 21/30
697/703 [============================>.] - ETA: 0s - loss: 0.0321 - root_mean_squared_error: 0.1790
Epoch 00021: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0320 - root_mean_squared_error: 0.1788 - val_loss: 0.0442 - val_root_mean_squared_error: 0.2102
Epoch 22/30
698/703 [============================>.] - ETA: 0s - loss: 0.0318 - root_mean_squared_error: 0.1782
Epoch 00022: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0318 - root_mean_squared_error: 0.1783 - val_loss: 0.0456 - val_root_mean_squared_error: 0.2134
Epoch 23/30
697/703 [============================>.] - ETA: 0s - loss: 0.0315 - root_mean_squared_error: 0.1773
Epoch 00023: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0316 - root_mean_squared_error: 0.1777 - val_loss: 0.0448 - val_root_mean_squared_error: 0.2116
Epoch 24/30
699/703 [============================>.] - ETA: 0s - loss: 0.0316 - root_mean_squared_error: 0.1777
Epoch 00024: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0315 - root_mean_squared_error: 0.1775 - val_loss: 0.0458 - val_root_mean_squared_error: 0.2141
Epoch 25/30
697/703 [============================>.] - ETA: 0s - loss: 0.0308 - root_mean_squared_error: 0.1756
Epoch 00025: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0307 - root_mean_squared_error: 0.1753 - val_loss: 0.0461 - val_root_mean_squared_error: 0.2148
Epoch 26/30
697/703 [============================>.] - ETA: 0s - loss: 0.0310 - root_mean_squared_error: 0.1760
Epoch 00026: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0308 - root_mean_squared_error: 0.1755 - val_loss: 0.0470 - val_root_mean_squared_error: 0.2168
Epoch 27/30
701/703 [============================>.] - ETA: 0s - loss: 0.0305 - root_mean_squared_error: 0.1747
Epoch 00027: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0306 - root_mean_squared_error: 0.1749 - val_loss: 0.0463 - val_root_mean_squared_error: 0.2153
Epoch 28/30
703/703 [==============================] - ETA: 0s - loss: 0.0302 - root_mean_squared_error: 0.1738
Epoch 00028: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0302 - root_mean_squared_error: 0.1738 - val_loss: 0.0465 - val_root_mean_squared_error: 0.2156
Epoch 29/30
695/703 [============================>.] - ETA: 0s - loss: 0.0297 - root_mean_squared_error: 0.1725
Epoch 00029: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0300 - root_mean_squared_error: 0.1731 - val_loss: 0.0469 - val_root_mean_squared_error: 0.2166
Epoch 30/30
702/703 [============================>.] - ETA: 0s - loss: 0.0296 - root_mean_squared_error: 0.1721
Epoch 00030: val_loss did not improve from 0.03820
703/703 [==============================] - 4s 6ms/step - loss: 0.0297 - root_mean_squared_error: 0.1722 - val_loss: 0.0467 - val_root_mean_squared_error: 0.2161
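# Not part of the original workflow: a hedged sketch of adding EarlyStopping next to
# the ModelCheckpoint defined above, since in these runs val_loss stops improving after
# the first few epochs and the remaining epochs mostly overfit. It assumes the standard
# tf.keras callbacks API and the `filepath` pattern already defined for this run.
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint

checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1,
                             save_best_only=True, mode='min')
early_stop = EarlyStopping(monitor='val_loss', patience=5,      # stop 5 epochs after the best val_loss
                           restore_best_weights=True)           # roll back to the best weights
callbacks_list = [checkpoint, early_stop]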
ticks = [i for i in range(10)]
labels = [i for i in range(1, 11)]_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera2', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_4_camera2.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera2 = model_history('model_5_4_camera2')_____no_output_____model_3_camera2.head()_____no_output_____#################### end of training camera2 data for model 3_____no_output_____########################### start of train with camera3 data for model 3_____no_output_____camera5 = load('/content/drive/My Drive/datasets/camera5_cleaned.npz')
log5 = pd.read_csv('/content/drive/My Drive/datasets/log5_cleaned.csv')_____no_output_____camera_processing(camera5, 'camera5')Done
log_processing(log5, 'log5')Done
train_split('camera5', 'log5')Done
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
camera_processing(camera2, 'camera2')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_4_camera2.0004-0.0382.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera3')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____filepath = "/content/drive/My Drive/epochs/model_5_5_camera3.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
212/212 [==============================] - ETA: 0s - loss: 0.0727 - root_mean_squared_error: 0.2696
Epoch 00001: val_loss improved from inf to 0.05052, saving model to /content/drive/My Drive/epochs/model_5_5_camera3.0001-0.0505.h5
212/212 [==============================] - 2s 8ms/step - loss: 0.0727 - root_mean_squared_error: 0.2696 - val_loss: 0.0505 - val_root_mean_squared_error: 0.2248
Epoch 2/30
207/212 [============================>.] - ETA: 0s - loss: 0.0642 - root_mean_squared_error: 0.2534
Epoch 00002: val_loss improved from 0.05052 to 0.04959, saving model to /content/drive/My Drive/epochs/model_5_5_camera3.0002-0.0496.h5
212/212 [==============================] - 1s 7ms/step - loss: 0.0632 - root_mean_squared_error: 0.2514 - val_loss: 0.0496 - val_root_mean_squared_error: 0.2227
Epoch 3/30
207/212 [============================>.] - ETA: 0s - loss: 0.0597 - root_mean_squared_error: 0.2444
Epoch 00003: val_loss improved from 0.04959 to 0.04791, saving model to /content/drive/My Drive/epochs/model_5_5_camera3.0003-0.0479.h5
212/212 [==============================] - 1s 7ms/step - loss: 0.0594 - root_mean_squared_error: 0.2437 - val_loss: 0.0479 - val_root_mean_squared_error: 0.2189
Epoch 4/30
203/212 [===========================>..] - ETA: 0s - loss: 0.0571 - root_mean_squared_error: 0.2389
Epoch 00004: val_loss did not improve from 0.04791
212/212 [==============================] - 1s 6ms/step - loss: 0.0573 - root_mean_squared_error: 0.2394 - val_loss: 0.0480 - val_root_mean_squared_error: 0.2191
Epoch 5/30
207/212 [============================>.] - ETA: 0s - loss: 0.0563 - root_mean_squared_error: 0.2373
Epoch 00005: val_loss improved from 0.04791 to 0.04643, saving model to /content/drive/My Drive/epochs/model_5_5_camera3.0005-0.0464.h5
212/212 [==============================] - 1s 7ms/step - loss: 0.0558 - root_mean_squared_error: 0.2363 - val_loss: 0.0464 - val_root_mean_squared_error: 0.2155
Epoch 6/30
209/212 [============================>.] - ETA: 0s - loss: 0.0547 - root_mean_squared_error: 0.2339
Epoch 00006: val_loss did not improve from 0.04643
212/212 [==============================] - 1s 6ms/step - loss: 0.0543 - root_mean_squared_error: 0.2330 - val_loss: 0.0468 - val_root_mean_squared_error: 0.2163
Epoch 7/30
211/212 [============================>.] - ETA: 0s - loss: 0.0533 - root_mean_squared_error: 0.2309
Epoch 00007: val_loss did not improve from 0.04643
212/212 [==============================] - 1s 7ms/step - loss: 0.0532 - root_mean_squared_error: 0.2306 - val_loss: 0.0466 - val_root_mean_squared_error: 0.2159
Epoch 8/30
208/212 [============================>.] - ETA: 0s - loss: 0.0516 - root_mean_squared_error: 0.2273
Epoch 00008: val_loss improved from 0.04643 to 0.04639, saving model to /content/drive/My Drive/epochs/model_5_5_camera3.0008-0.0464.h5
212/212 [==============================] - 2s 7ms/step - loss: 0.0519 - root_mean_squared_error: 0.2279 - val_loss: 0.0464 - val_root_mean_squared_error: 0.2154
Epoch 9/30
207/212 [============================>.] - ETA: 0s - loss: 0.0512 - root_mean_squared_error: 0.2263
Epoch 00009: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 7ms/step - loss: 0.0509 - root_mean_squared_error: 0.2256 - val_loss: 0.0469 - val_root_mean_squared_error: 0.2165
Epoch 10/30
205/212 [============================>.] - ETA: 0s - loss: 0.0492 - root_mean_squared_error: 0.2219
Epoch 00010: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0496 - root_mean_squared_error: 0.2226 - val_loss: 0.0468 - val_root_mean_squared_error: 0.2163
Epoch 11/30
211/212 [============================>.] - ETA: 0s - loss: 0.0494 - root_mean_squared_error: 0.2223
Epoch 00011: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0493 - root_mean_squared_error: 0.2220 - val_loss: 0.0477 - val_root_mean_squared_error: 0.2185
Epoch 12/30
211/212 [============================>.] - ETA: 0s - loss: 0.0489 - root_mean_squared_error: 0.2212
Epoch 00012: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0489 - root_mean_squared_error: 0.2212 - val_loss: 0.0466 - val_root_mean_squared_error: 0.2158
Epoch 13/30
206/212 [============================>.] - ETA: 0s - loss: 0.0483 - root_mean_squared_error: 0.2199
Epoch 00013: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0486 - root_mean_squared_error: 0.2204 - val_loss: 0.0486 - val_root_mean_squared_error: 0.2204
Epoch 14/30
206/212 [============================>.] - ETA: 0s - loss: 0.0476 - root_mean_squared_error: 0.2182
Epoch 00014: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0473 - root_mean_squared_error: 0.2175 - val_loss: 0.0499 - val_root_mean_squared_error: 0.2235
Epoch 15/30
204/212 [===========================>..] - ETA: 0s - loss: 0.0469 - root_mean_squared_error: 0.2165
Epoch 00015: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0475 - root_mean_squared_error: 0.2178 - val_loss: 0.0476 - val_root_mean_squared_error: 0.2182
Epoch 16/30
205/212 [============================>.] - ETA: 0s - loss: 0.0469 - root_mean_squared_error: 0.2166
Epoch 00016: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 7ms/step - loss: 0.0469 - root_mean_squared_error: 0.2166 - val_loss: 0.0496 - val_root_mean_squared_error: 0.2228
Epoch 17/30
212/212 [==============================] - ETA: 0s - loss: 0.0457 - root_mean_squared_error: 0.2139
Epoch 00017: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0457 - root_mean_squared_error: 0.2139 - val_loss: 0.0487 - val_root_mean_squared_error: 0.2207
Epoch 18/30
209/212 [============================>.] - ETA: 0s - loss: 0.0460 - root_mean_squared_error: 0.2145
Epoch 00018: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0460 - root_mean_squared_error: 0.2144 - val_loss: 0.0487 - val_root_mean_squared_error: 0.2207
Epoch 19/30
212/212 [==============================] - ETA: 0s - loss: 0.0453 - root_mean_squared_error: 0.2128
Epoch 00019: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0453 - root_mean_squared_error: 0.2128 - val_loss: 0.0480 - val_root_mean_squared_error: 0.2191
Epoch 20/30
212/212 [==============================] - ETA: 0s - loss: 0.0450 - root_mean_squared_error: 0.2121
Epoch 00020: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0450 - root_mean_squared_error: 0.2121 - val_loss: 0.0488 - val_root_mean_squared_error: 0.2210
Epoch 21/30
205/212 [============================>.] - ETA: 0s - loss: 0.0434 - root_mean_squared_error: 0.2082
Epoch 00021: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0440 - root_mean_squared_error: 0.2097 - val_loss: 0.0495 - val_root_mean_squared_error: 0.2226
Epoch 22/30
203/212 [===========================>..] - ETA: 0s - loss: 0.0448 - root_mean_squared_error: 0.2116
Epoch 00022: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0440 - root_mean_squared_error: 0.2098 - val_loss: 0.0493 - val_root_mean_squared_error: 0.2220
Epoch 23/30
206/212 [============================>.] - ETA: 0s - loss: 0.0447 - root_mean_squared_error: 0.2113
Epoch 00023: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 7ms/step - loss: 0.0442 - root_mean_squared_error: 0.2102 - val_loss: 0.0486 - val_root_mean_squared_error: 0.2204
Epoch 24/30
204/212 [===========================>..] - ETA: 0s - loss: 0.0437 - root_mean_squared_error: 0.2090
Epoch 00024: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0440 - root_mean_squared_error: 0.2098 - val_loss: 0.0498 - val_root_mean_squared_error: 0.2231
Epoch 25/30
211/212 [============================>.] - ETA: 0s - loss: 0.0432 - root_mean_squared_error: 0.2078
Epoch 00025: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0431 - root_mean_squared_error: 0.2075 - val_loss: 0.0494 - val_root_mean_squared_error: 0.2222
Epoch 26/30
204/212 [===========================>..] - ETA: 0s - loss: 0.0421 - root_mean_squared_error: 0.2052
Epoch 00026: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0426 - root_mean_squared_error: 0.2063 - val_loss: 0.0497 - val_root_mean_squared_error: 0.2230
Epoch 27/30
206/212 [============================>.] - ETA: 0s - loss: 0.0430 - root_mean_squared_error: 0.2074
Epoch 00027: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0426 - root_mean_squared_error: 0.2064 - val_loss: 0.0495 - val_root_mean_squared_error: 0.2224
Epoch 28/30
203/212 [===========================>..] - ETA: 0s - loss: 0.0424 - root_mean_squared_error: 0.2058
Epoch 00028: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0422 - root_mean_squared_error: 0.2054 - val_loss: 0.0522 - val_root_mean_squared_error: 0.2284
Epoch 29/30
203/212 [===========================>..] - ETA: 0s - loss: 0.0400 - root_mean_squared_error: 0.2000
Epoch 00029: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0416 - root_mean_squared_error: 0.2041 - val_loss: 0.0507 - val_root_mean_squared_error: 0.2251
Epoch 30/30
204/212 [===========================>..] - ETA: 0s - loss: 0.0412 - root_mean_squared_error: 0.2030
Epoch 00030: val_loss did not improve from 0.04639
212/212 [==============================] - 1s 6ms/step - loss: 0.0415 - root_mean_squared_error: 0.2037 - val_loss: 0.0501 - val_root_mean_squared_error: 0.2238
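# Not in the original notebook: a hedged sketch of scoring the reloaded best checkpoint
# on the held-out split with the standard Keras evaluate call, using the X_test/y_test
# arrays produced by train_load('camera3') above. The unpacking assumes the model is
# compiled with a single RootMeanSquaredError metric, as the training logs suggest.
best_model = load_model('/content/drive/My Drive/epochs/model_5_5_camera3.0008-0.0464.h5')
test_loss, test_rmse = best_model.evaluate(X_test, y_test, batch_size=64, verbose=0)
print(f'camera3 hold-out MSE: {test_loss:.4f}, RMSE: {test_rmse:.4f}')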
ticks = [i for i in range(0, 31, 5)]
labels = [i for i in range(0, 31, 5)]
labels[0] = 1_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera3', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_5_camera3.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera3 = model_history('model_5_5_camera3')_____no_output_____model_3_camera3.head()_____no_output_____#################### end of training camera3 data for model 3_____no_output_____########################### start of train with camera4 data for model 3_____no_output_____camera6 = load('/content/drive/My Drive/datasets/camera6_cleaned.npz')
log6 = pd.read_csv('/content/drive/My Drive/datasets/log6_cleaned.csv')_____no_output_____camera_processing(camera6, 'camera6')Done
log_processing(log6, 'log6')Done
train_split('camera6', 'log6')Done
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
camera_processing(camera2, 'camera2')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_5_camera3.0008-0.0464.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera4')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____filepath = "/content/drive/My Drive/epochs/model_5_6_camera4.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
668/668 [==============================] - ETA: 0s - loss: 0.1216 - root_mean_squared_error: 0.3487
Epoch 00001: val_loss improved from inf to 0.13672, saving model to /content/drive/My Drive/epochs/model_5_6_camera4.0001-0.1367.h5
668/668 [==============================] - 5s 7ms/step - loss: 0.1216 - root_mean_squared_error: 0.3487 - val_loss: 0.1367 - val_root_mean_squared_error: 0.3698
Epoch 2/30
667/668 [============================>.] - ETA: 0s - loss: 0.1131 - root_mean_squared_error: 0.3363
Epoch 00002: val_loss improved from 0.13672 to 0.13305, saving model to /content/drive/My Drive/epochs/model_5_6_camera4.0002-0.1331.h5
668/668 [==============================] - 4s 7ms/step - loss: 0.1131 - root_mean_squared_error: 0.3363 - val_loss: 0.1331 - val_root_mean_squared_error: 0.3648
Epoch 3/30
665/668 [============================>.] - ETA: 0s - loss: 0.1085 - root_mean_squared_error: 0.3294
Epoch 00003: val_loss improved from 0.13305 to 0.13265, saving model to /content/drive/My Drive/epochs/model_5_6_camera4.0003-0.1327.h5
668/668 [==============================] - 4s 7ms/step - loss: 0.1082 - root_mean_squared_error: 0.3290 - val_loss: 0.1327 - val_root_mean_squared_error: 0.3642
Epoch 4/30
661/668 [============================>.] - ETA: 0s - loss: 0.1023 - root_mean_squared_error: 0.3198
Epoch 00004: val_loss improved from 0.13265 to 0.13179, saving model to /content/drive/My Drive/epochs/model_5_6_camera4.0004-0.1318.h5
668/668 [==============================] - 5s 7ms/step - loss: 0.1044 - root_mean_squared_error: 0.3231 - val_loss: 0.1318 - val_root_mean_squared_error: 0.3630
Epoch 5/30
664/668 [============================>.] - ETA: 0s - loss: 0.1015 - root_mean_squared_error: 0.3187
Epoch 00005: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.1014 - root_mean_squared_error: 0.3184 - val_loss: 0.1332 - val_root_mean_squared_error: 0.3649
Epoch 6/30
667/668 [============================>.] - ETA: 0s - loss: 0.0982 - root_mean_squared_error: 0.3134
Epoch 00006: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.0982 - root_mean_squared_error: 0.3133 - val_loss: 0.1346 - val_root_mean_squared_error: 0.3669
Epoch 7/30
667/668 [============================>.] - ETA: 0s - loss: 0.0961 - root_mean_squared_error: 0.3100
Epoch 00007: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.0961 - root_mean_squared_error: 0.3100 - val_loss: 0.1355 - val_root_mean_squared_error: 0.3681
Epoch 8/30
665/668 [============================>.] - ETA: 0s - loss: 0.0941 - root_mean_squared_error: 0.3067
Epoch 00008: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0939 - root_mean_squared_error: 0.3064 - val_loss: 0.1363 - val_root_mean_squared_error: 0.3692
Epoch 9/30
662/668 [============================>.] - ETA: 0s - loss: 0.0920 - root_mean_squared_error: 0.3033
Epoch 00009: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0919 - root_mean_squared_error: 0.3032 - val_loss: 0.1378 - val_root_mean_squared_error: 0.3712
Epoch 10/30
665/668 [============================>.] - ETA: 0s - loss: 0.0908 - root_mean_squared_error: 0.3012
Epoch 00010: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0906 - root_mean_squared_error: 0.3010 - val_loss: 0.1406 - val_root_mean_squared_error: 0.3750
Epoch 11/30
668/668 [==============================] - ETA: 0s - loss: 0.0888 - root_mean_squared_error: 0.2980
Epoch 00011: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0888 - root_mean_squared_error: 0.2980 - val_loss: 0.1411 - val_root_mean_squared_error: 0.3757
Epoch 12/30
660/668 [============================>.] - ETA: 0s - loss: 0.0879 - root_mean_squared_error: 0.2964
Epoch 00012: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0876 - root_mean_squared_error: 0.2960 - val_loss: 0.1413 - val_root_mean_squared_error: 0.3759
Epoch 13/30
660/668 [============================>.] - ETA: 0s - loss: 0.0861 - root_mean_squared_error: 0.2935
Epoch 00013: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.0861 - root_mean_squared_error: 0.2934 - val_loss: 0.1446 - val_root_mean_squared_error: 0.3802
Epoch 14/30
660/668 [============================>.] - ETA: 0s - loss: 0.0848 - root_mean_squared_error: 0.2913
Epoch 00014: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.0849 - root_mean_squared_error: 0.2914 - val_loss: 0.1449 - val_root_mean_squared_error: 0.3807
Epoch 15/30
668/668 [==============================] - ETA: 0s - loss: 0.0834 - root_mean_squared_error: 0.2888
Epoch 00015: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.0834 - root_mean_squared_error: 0.2888 - val_loss: 0.1452 - val_root_mean_squared_error: 0.3811
Epoch 16/30
668/668 [==============================] - ETA: 0s - loss: 0.0829 - root_mean_squared_error: 0.2880
Epoch 00016: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0829 - root_mean_squared_error: 0.2880 - val_loss: 0.1464 - val_root_mean_squared_error: 0.3826
Epoch 17/30
664/668 [============================>.] - ETA: 0s - loss: 0.0820 - root_mean_squared_error: 0.2864
Epoch 00017: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0819 - root_mean_squared_error: 0.2861 - val_loss: 0.1481 - val_root_mean_squared_error: 0.3848
Epoch 18/30
666/668 [============================>.] - ETA: 0s - loss: 0.0800 - root_mean_squared_error: 0.2828
Epoch 00018: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0801 - root_mean_squared_error: 0.2830 - val_loss: 0.1502 - val_root_mean_squared_error: 0.3875
Epoch 19/30
660/668 [============================>.] - ETA: 0s - loss: 0.0798 - root_mean_squared_error: 0.2825
Epoch 00019: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0800 - root_mean_squared_error: 0.2829 - val_loss: 0.1497 - val_root_mean_squared_error: 0.3869
Epoch 20/30
666/668 [============================>.] - ETA: 0s - loss: 0.0786 - root_mean_squared_error: 0.2803
Epoch 00020: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0786 - root_mean_squared_error: 0.2804 - val_loss: 0.1495 - val_root_mean_squared_error: 0.3866
Epoch 21/30
663/668 [============================>.] - ETA: 0s - loss: 0.0768 - root_mean_squared_error: 0.2771
Epoch 00021: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0773 - root_mean_squared_error: 0.2780 - val_loss: 0.1508 - val_root_mean_squared_error: 0.3884
Epoch 22/30
660/668 [============================>.] - ETA: 0s - loss: 0.0769 - root_mean_squared_error: 0.2773
Epoch 00022: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 7ms/step - loss: 0.0770 - root_mean_squared_error: 0.2775 - val_loss: 0.1520 - val_root_mean_squared_error: 0.3898
Epoch 23/30
661/668 [============================>.] - ETA: 0s - loss: 0.0762 - root_mean_squared_error: 0.2761
Epoch 00023: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0763 - root_mean_squared_error: 0.2761 - val_loss: 0.1524 - val_root_mean_squared_error: 0.3904
Epoch 24/30
668/668 [==============================] - ETA: 0s - loss: 0.0756 - root_mean_squared_error: 0.2750
Epoch 00024: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0756 - root_mean_squared_error: 0.2750 - val_loss: 0.1531 - val_root_mean_squared_error: 0.3913
Epoch 25/30
664/668 [============================>.] - ETA: 0s - loss: 0.0747 - root_mean_squared_error: 0.2733
Epoch 00025: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0750 - root_mean_squared_error: 0.2738 - val_loss: 0.1560 - val_root_mean_squared_error: 0.3949
Epoch 26/30
666/668 [============================>.] - ETA: 0s - loss: 0.0744 - root_mean_squared_error: 0.2728
Epoch 00026: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0744 - root_mean_squared_error: 0.2727 - val_loss: 0.1556 - val_root_mean_squared_error: 0.3944
Epoch 27/30
667/668 [============================>.] - ETA: 0s - loss: 0.0742 - root_mean_squared_error: 0.2725
Epoch 00027: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0742 - root_mean_squared_error: 0.2724 - val_loss: 0.1541 - val_root_mean_squared_error: 0.3925
Epoch 28/30
662/668 [============================>.] - ETA: 0s - loss: 0.0728 - root_mean_squared_error: 0.2698
Epoch 00028: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0727 - root_mean_squared_error: 0.2696 - val_loss: 0.1557 - val_root_mean_squared_error: 0.3946
Epoch 29/30
660/668 [============================>.] - ETA: 0s - loss: 0.0722 - root_mean_squared_error: 0.2687
Epoch 00029: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0720 - root_mean_squared_error: 0.2683 - val_loss: 0.1586 - val_root_mean_squared_error: 0.3982
Epoch 30/30
666/668 [============================>.] - ETA: 0s - loss: 0.0718 - root_mean_squared_error: 0.2680
Epoch 00030: val_loss did not improve from 0.13179
668/668 [==============================] - 4s 6ms/step - loss: 0.0718 - root_mean_squared_error: 0.2679 - val_loss: 0.1565 - val_root_mean_squared_error: 0.3956
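# Not in the original notebook: a sketch of collapsing the repeated per-camera cells
# (load previous checkpoint -> train_load -> fit with a ModelCheckpoint) into one helper.
# `finetune_on_camera` is a hypothetical name; it only reuses the load_model, train_load
# and ModelCheckpoint calls shown above.
def finetune_on_camera(prev_checkpoint, camera_name, run_tag, epochs=30):
    model = load_model(prev_checkpoint)
    X_train, X_test, y_train, y_test = train_load(camera_name)
    filepath = f"/content/drive/My Drive/epochs/{run_tag}.{{epoch:04d}}-{{val_loss:.4f}}.h5"
    checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1,
                                 save_best_only=True, mode='min')
    history = model.fit(X_train, y_train,
                        batch_size=64,
                        validation_data=(X_test, y_test),
                        epochs=epochs,
                        verbose=1,
                        callbacks=[checkpoint])
    return model, history

# e.g. the next section's run could then be a single call:
# model, history = finetune_on_camera('/content/drive/My Drive/epochs/model_5_6_camera4.0004-0.1318.h5',
#                                     'camera5', 'model_5_7_camera5')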
ticks = [i for i in range(10)]
labels = [i for i in range(1, 11)]_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera4', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_6_camera4.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera4 = model_history('model_5_6_camera4')_____no_output_____model_3_camera4.head()_____no_output_____#################### end of training camera4 data for model 3_____no_output_____########################### start of train with camera5 data for model 3_____no_output_____camera7 = load('/content/drive/My Drive/datasets/camera7_cleaned.npz')
log7 = pd.read_csv('/content/drive/My Drive/datasets/log7_cleaned.csv')_____no_output_____camera_processing(camera7, 'camera7')Done
log_processing(log7, 'log7')Done
train_split('camera7', 'log7')Done
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
camera_processing(camera2, 'camera2')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_6_camera4.0004-0.1318.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera5')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____filepath = "/content/drive/My Drive/epochs/model_5_7_camera5.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
284/289 [============================>.] - ETA: 0s - loss: 0.1137 - root_mean_squared_error: 0.3372
Epoch 00001: val_loss improved from inf to 0.14154, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0001-0.1415.h5
289/289 [==============================] - 3s 9ms/step - loss: 0.1138 - root_mean_squared_error: 0.3373 - val_loss: 0.1415 - val_root_mean_squared_error: 0.3762
Epoch 2/30
280/289 [============================>.] - ETA: 0s - loss: 0.1007 - root_mean_squared_error: 0.3174
Epoch 00002: val_loss improved from 0.14154 to 0.13828, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0002-0.1383.h5
289/289 [==============================] - 2s 7ms/step - loss: 0.0989 - root_mean_squared_error: 0.3144 - val_loss: 0.1383 - val_root_mean_squared_error: 0.3719
Epoch 3/30
282/289 [============================>.] - ETA: 0s - loss: 0.0961 - root_mean_squared_error: 0.3100
Epoch 00003: val_loss improved from 0.13828 to 0.13581, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0003-0.1358.h5
289/289 [==============================] - 2s 7ms/step - loss: 0.0947 - root_mean_squared_error: 0.3078 - val_loss: 0.1358 - val_root_mean_squared_error: 0.3685
Epoch 4/30
286/289 [============================>.] - ETA: 0s - loss: 0.0917 - root_mean_squared_error: 0.3028
Epoch 00004: val_loss improved from 0.13581 to 0.13488, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0004-0.1349.h5
289/289 [==============================] - 2s 7ms/step - loss: 0.0913 - root_mean_squared_error: 0.3022 - val_loss: 0.1349 - val_root_mean_squared_error: 0.3673
Epoch 5/30
285/289 [============================>.] - ETA: 0s - loss: 0.0865 - root_mean_squared_error: 0.2941
Epoch 00005: val_loss improved from 0.13488 to 0.13354, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0005-0.1335.h5
289/289 [==============================] - 2s 7ms/step - loss: 0.0880 - root_mean_squared_error: 0.2967 - val_loss: 0.1335 - val_root_mean_squared_error: 0.3654
Epoch 6/30
287/289 [============================>.] - ETA: 0s - loss: 0.0855 - root_mean_squared_error: 0.2924
Epoch 00006: val_loss improved from 0.13354 to 0.13233, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0006-0.1323.h5
289/289 [==============================] - 2s 7ms/step - loss: 0.0852 - root_mean_squared_error: 0.2920 - val_loss: 0.1323 - val_root_mean_squared_error: 0.3638
Epoch 7/30
282/289 [============================>.] - ETA: 0s - loss: 0.0833 - root_mean_squared_error: 0.2887
Epoch 00007: val_loss did not improve from 0.13233
289/289 [==============================] - 2s 7ms/step - loss: 0.0824 - root_mean_squared_error: 0.2871 - val_loss: 0.1324 - val_root_mean_squared_error: 0.3638
Epoch 8/30
288/289 [============================>.] - ETA: 0s - loss: 0.0803 - root_mean_squared_error: 0.2834
Epoch 00008: val_loss improved from 0.13233 to 0.13203, saving model to /content/drive/My Drive/epochs/model_5_7_camera5.0008-0.1320.h5
289/289 [==============================] - 2s 7ms/step - loss: 0.0802 - root_mean_squared_error: 0.2833 - val_loss: 0.1320 - val_root_mean_squared_error: 0.3634
Epoch 9/30
285/289 [============================>.] - ETA: 0s - loss: 0.0789 - root_mean_squared_error: 0.2810
Epoch 00009: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0783 - root_mean_squared_error: 0.2798 - val_loss: 0.1333 - val_root_mean_squared_error: 0.3651
Epoch 10/30
282/289 [============================>.] - ETA: 0s - loss: 0.0761 - root_mean_squared_error: 0.2759
Epoch 00010: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0766 - root_mean_squared_error: 0.2767 - val_loss: 0.1328 - val_root_mean_squared_error: 0.3645
Epoch 11/30
283/289 [============================>.] - ETA: 0s - loss: 0.0749 - root_mean_squared_error: 0.2737
Epoch 00011: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0745 - root_mean_squared_error: 0.2730 - val_loss: 0.1334 - val_root_mean_squared_error: 0.3652
Epoch 12/30
281/289 [============================>.] - ETA: 0s - loss: 0.0741 - root_mean_squared_error: 0.2722
Epoch 00012: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0733 - root_mean_squared_error: 0.2707 - val_loss: 0.1348 - val_root_mean_squared_error: 0.3671
Epoch 13/30
281/289 [============================>.] - ETA: 0s - loss: 0.0726 - root_mean_squared_error: 0.2694
Epoch 00013: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0717 - root_mean_squared_error: 0.2678 - val_loss: 0.1362 - val_root_mean_squared_error: 0.3691
Epoch 14/30
289/289 [==============================] - ETA: 0s - loss: 0.0705 - root_mean_squared_error: 0.2656
Epoch 00014: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0705 - root_mean_squared_error: 0.2656 - val_loss: 0.1346 - val_root_mean_squared_error: 0.3669
Epoch 15/30
286/289 [============================>.] - ETA: 0s - loss: 0.0688 - root_mean_squared_error: 0.2624
Epoch 00015: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0693 - root_mean_squared_error: 0.2632 - val_loss: 0.1353 - val_root_mean_squared_error: 0.3679
Epoch 16/30
287/289 [============================>.] - ETA: 0s - loss: 0.0683 - root_mean_squared_error: 0.2613
Epoch 00016: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0681 - root_mean_squared_error: 0.2609 - val_loss: 0.1347 - val_root_mean_squared_error: 0.3670
Epoch 17/30
282/289 [============================>.] - ETA: 0s - loss: 0.0675 - root_mean_squared_error: 0.2597
Epoch 00017: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0675 - root_mean_squared_error: 0.2599 - val_loss: 0.1348 - val_root_mean_squared_error: 0.3672
Epoch 18/30
288/289 [============================>.] - ETA: 0s - loss: 0.0660 - root_mean_squared_error: 0.2570
Epoch 00018: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0660 - root_mean_squared_error: 0.2569 - val_loss: 0.1341 - val_root_mean_squared_error: 0.3661
Epoch 19/30
288/289 [============================>.] - ETA: 0s - loss: 0.0648 - root_mean_squared_error: 0.2546
Epoch 00019: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0648 - root_mean_squared_error: 0.2545 - val_loss: 0.1352 - val_root_mean_squared_error: 0.3677
Epoch 20/30
283/289 [============================>.] - ETA: 0s - loss: 0.0644 - root_mean_squared_error: 0.2538
Epoch 00020: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0642 - root_mean_squared_error: 0.2534 - val_loss: 0.1348 - val_root_mean_squared_error: 0.3671
Epoch 21/30
284/289 [============================>.] - ETA: 0s - loss: 0.0626 - root_mean_squared_error: 0.2501
Epoch 00021: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0638 - root_mean_squared_error: 0.2527 - val_loss: 0.1362 - val_root_mean_squared_error: 0.3690
Epoch 22/30
280/289 [============================>.] - ETA: 0s - loss: 0.0634 - root_mean_squared_error: 0.2517
Epoch 00022: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0630 - root_mean_squared_error: 0.2509 - val_loss: 0.1380 - val_root_mean_squared_error: 0.3715
Epoch 23/30
289/289 [==============================] - ETA: 0s - loss: 0.0617 - root_mean_squared_error: 0.2484
Epoch 00023: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0617 - root_mean_squared_error: 0.2484 - val_loss: 0.1360 - val_root_mean_squared_error: 0.3688
Epoch 24/30
287/289 [============================>.] - ETA: 0s - loss: 0.0619 - root_mean_squared_error: 0.2487
Epoch 00024: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0619 - root_mean_squared_error: 0.2489 - val_loss: 0.1379 - val_root_mean_squared_error: 0.3713
Epoch 25/30
282/289 [============================>.] - ETA: 0s - loss: 0.0601 - root_mean_squared_error: 0.2452
Epoch 00025: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0609 - root_mean_squared_error: 0.2468 - val_loss: 0.1372 - val_root_mean_squared_error: 0.3705
Epoch 26/30
287/289 [============================>.] - ETA: 0s - loss: 0.0600 - root_mean_squared_error: 0.2449
Epoch 00026: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0602 - root_mean_squared_error: 0.2453 - val_loss: 0.1382 - val_root_mean_squared_error: 0.3718
Epoch 27/30
282/289 [============================>.] - ETA: 0s - loss: 0.0591 - root_mean_squared_error: 0.2430
Epoch 00027: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 7ms/step - loss: 0.0591 - root_mean_squared_error: 0.2432 - val_loss: 0.1388 - val_root_mean_squared_error: 0.3725
Epoch 28/30
280/289 [============================>.] - ETA: 0s - loss: 0.0599 - root_mean_squared_error: 0.2447
Epoch 00028: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0599 - root_mean_squared_error: 0.2448 - val_loss: 0.1400 - val_root_mean_squared_error: 0.3742
Epoch 29/30
285/289 [============================>.] - ETA: 0s - loss: 0.0583 - root_mean_squared_error: 0.2414
Epoch 00029: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0581 - root_mean_squared_error: 0.2410 - val_loss: 0.1395 - val_root_mean_squared_error: 0.3735
Epoch 30/30
289/289 [==============================] - ETA: 0s - loss: 0.0577 - root_mean_squared_error: 0.2402
Epoch 00030: val_loss did not improve from 0.13203
289/289 [==============================] - 2s 6ms/step - loss: 0.0577 - root_mean_squared_error: 0.2402 - val_loss: 0.1403 - val_root_mean_squared_error: 0.3745
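# Aside, not part of the original runs: in this fit (and the later ones below) val_loss stops
# improving within the first handful of epochs while the training loss keeps falling, so an
# EarlyStopping callback next to the ModelCheckpoint would cut the remaining epochs short.
# Minimal sketch, assuming EarlyStopping comes from the same tensorflow.keras.callbacks module
# that ModelCheckpoint is imported from:
from tensorflow.keras.callbacks import EarlyStopping
early_stop = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True, verbose=1)
# callbacks_list = [checkpoint, early_stop]   # would replace the [checkpoint] list used for the fits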
# one tick per epoch; the fit above ran for 30 epochs
ticks = [i for i in range(30)]
labels = [i for i in range(1, 31)]_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera5', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_7_camera5.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera5 = model_history('model_5_7_camera5')_____no_output_____model_2_camera7.head()_____no_output_____#################### end of training camera5 data for model 3_____no_output_____########################### start of train with camera6 data for model 3_____no_output_____camera8 = load('/content/drive/My Drive/datasets/camera8_cleaned.npz')
log8 = pd.read_csv('/content/drive/My Drive/datasets/log8_cleaned.csv')_____no_output_____camera_processing(camera8, 'camera8')Done
log_processing(log8, 'log8')Done
train_split('camera8', 'log8')Done
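# Aside, a sketch not present in the original notebook: the CSV written by model_history() and
# the checkpoint filenames both record val_loss, so the best camera5 epoch can be looked up
# programmatically instead of being read off the training log above.
import glob
hist = pd.read_csv('/content/drive/My Drive/datasets/model_5_7_camera5.csv')
best_epoch = hist['val_loss'].idxmin() + 1           # Keras numbers epochs from 1
print(best_epoch, hist['val_loss'].min())            # per the log above: epoch 8, ~0.1320
# matching checkpoint file (pattern model_5_7_camera5.{epoch:04d}-{val_loss:.4f}.h5); this should
# point at the same file that is reloaded with load_model() a couple of cells below
best_ckpt = min(glob.glob('/content/drive/My Drive/epochs/model_5_7_camera5.*.h5'),
                key=lambda p: float(p.rsplit('-', 1)[-1][:-3]))
print(best_ckpt)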
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
camera_processing(camera2, 'camera2')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_7_camera5.0008-0.1320.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera6')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____filepath = "/content/drive/My Drive/epochs/model_5_8_camera6.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
1026/1032 [============================>.] - ETA: 0s - loss: 0.0761 - root_mean_squared_error: 0.2759
Epoch 00001: val_loss improved from inf to 0.07317, saving model to /content/drive/My Drive/epochs/model_5_8_camera6.0001-0.0732.h5
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0763 - root_mean_squared_error: 0.2762 - val_loss: 0.0732 - val_root_mean_squared_error: 0.2705
Epoch 2/30
1030/1032 [============================>.] - ETA: 0s - loss: 0.0690 - root_mean_squared_error: 0.2626
Epoch 00002: val_loss improved from 0.07317 to 0.07173, saving model to /content/drive/My Drive/epochs/model_5_8_camera6.0002-0.0717.h5
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0689 - root_mean_squared_error: 0.2624 - val_loss: 0.0717 - val_root_mean_squared_error: 0.2678
Epoch 3/30
1029/1032 [============================>.] - ETA: 0s - loss: 0.0656 - root_mean_squared_error: 0.2561
Epoch 00003: val_loss improved from 0.07173 to 0.07048, saving model to /content/drive/My Drive/epochs/model_5_8_camera6.0003-0.0705.h5
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0656 - root_mean_squared_error: 0.2561 - val_loss: 0.0705 - val_root_mean_squared_error: 0.2655
Epoch 4/30
1032/1032 [==============================] - ETA: 0s - loss: 0.0640 - root_mean_squared_error: 0.2531
Epoch 00004: val_loss improved from 0.07048 to 0.07029, saving model to /content/drive/My Drive/epochs/model_5_8_camera6.0004-0.0703.h5
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0640 - root_mean_squared_error: 0.2531 - val_loss: 0.0703 - val_root_mean_squared_error: 0.2651
Epoch 5/30
1030/1032 [============================>.] - ETA: 0s - loss: 0.0626 - root_mean_squared_error: 0.2502
Epoch 00005: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0627 - root_mean_squared_error: 0.2504 - val_loss: 0.0705 - val_root_mean_squared_error: 0.2655
Epoch 6/30
1023/1032 [============================>.] - ETA: 0s - loss: 0.0617 - root_mean_squared_error: 0.2484
Epoch 00006: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0619 - root_mean_squared_error: 0.2487 - val_loss: 0.0717 - val_root_mean_squared_error: 0.2678
Epoch 7/30
1025/1032 [============================>.] - ETA: 0s - loss: 0.0613 - root_mean_squared_error: 0.2475
Epoch 00007: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0611 - root_mean_squared_error: 0.2472 - val_loss: 0.0716 - val_root_mean_squared_error: 0.2675
Epoch 8/30
1028/1032 [============================>.] - ETA: 0s - loss: 0.0608 - root_mean_squared_error: 0.2466
Epoch 00008: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0607 - root_mean_squared_error: 0.2464 - val_loss: 0.0712 - val_root_mean_squared_error: 0.2668
Epoch 9/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0601 - root_mean_squared_error: 0.2451
Epoch 00009: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0600 - root_mean_squared_error: 0.2449 - val_loss: 0.0711 - val_root_mean_squared_error: 0.2666
Epoch 10/30
1032/1032 [==============================] - ETA: 0s - loss: 0.0597 - root_mean_squared_error: 0.2444
Epoch 00010: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0597 - root_mean_squared_error: 0.2444 - val_loss: 0.0716 - val_root_mean_squared_error: 0.2676
Epoch 11/30
1030/1032 [============================>.] - ETA: 0s - loss: 0.0592 - root_mean_squared_error: 0.2434
Epoch 00011: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0592 - root_mean_squared_error: 0.2433 - val_loss: 0.0736 - val_root_mean_squared_error: 0.2713
Epoch 12/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0592 - root_mean_squared_error: 0.2432
Epoch 00012: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0590 - root_mean_squared_error: 0.2429 - val_loss: 0.0718 - val_root_mean_squared_error: 0.2680
Epoch 13/30
1031/1032 [============================>.] - ETA: 0s - loss: 0.0589 - root_mean_squared_error: 0.2426
Epoch 00013: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0589 - root_mean_squared_error: 0.2426 - val_loss: 0.0727 - val_root_mean_squared_error: 0.2696
Epoch 14/30
1029/1032 [============================>.] - ETA: 0s - loss: 0.0587 - root_mean_squared_error: 0.2422
Epoch 00014: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0586 - root_mean_squared_error: 0.2420 - val_loss: 0.0732 - val_root_mean_squared_error: 0.2706
Epoch 15/30
1025/1032 [============================>.] - ETA: 0s - loss: 0.0581 - root_mean_squared_error: 0.2410
Epoch 00015: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0579 - root_mean_squared_error: 0.2407 - val_loss: 0.0736 - val_root_mean_squared_error: 0.2713
Epoch 16/30
1024/1032 [============================>.] - ETA: 0s - loss: 0.0580 - root_mean_squared_error: 0.2409
Epoch 00016: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0579 - root_mean_squared_error: 0.2407 - val_loss: 0.0742 - val_root_mean_squared_error: 0.2723
Epoch 17/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0578 - root_mean_squared_error: 0.2404
Epoch 00017: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0577 - root_mean_squared_error: 0.2403 - val_loss: 0.0730 - val_root_mean_squared_error: 0.2701
Epoch 18/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0578 - root_mean_squared_error: 0.2404
Epoch 00018: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0577 - root_mean_squared_error: 0.2402 - val_loss: 0.0731 - val_root_mean_squared_error: 0.2703
Epoch 19/30
1028/1032 [============================>.] - ETA: 0s - loss: 0.0574 - root_mean_squared_error: 0.2395
Epoch 00019: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0573 - root_mean_squared_error: 0.2393 - val_loss: 0.0734 - val_root_mean_squared_error: 0.2710
Epoch 20/30
1025/1032 [============================>.] - ETA: 0s - loss: 0.0572 - root_mean_squared_error: 0.2391
Epoch 00020: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0574 - root_mean_squared_error: 0.2395 - val_loss: 0.0752 - val_root_mean_squared_error: 0.2742
Epoch 21/30
1031/1032 [============================>.] - ETA: 0s - loss: 0.0570 - root_mean_squared_error: 0.2387
Epoch 00021: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0570 - root_mean_squared_error: 0.2387 - val_loss: 0.0740 - val_root_mean_squared_error: 0.2721
Epoch 22/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0569 - root_mean_squared_error: 0.2385
Epoch 00022: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0568 - root_mean_squared_error: 0.2384 - val_loss: 0.0739 - val_root_mean_squared_error: 0.2719
Epoch 23/30
1030/1032 [============================>.] - ETA: 0s - loss: 0.0566 - root_mean_squared_error: 0.2379
Epoch 00023: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0568 - root_mean_squared_error: 0.2384 - val_loss: 0.0745 - val_root_mean_squared_error: 0.2729
Epoch 24/30
1025/1032 [============================>.] - ETA: 0s - loss: 0.0562 - root_mean_squared_error: 0.2371
Epoch 00024: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0566 - root_mean_squared_error: 0.2379 - val_loss: 0.0748 - val_root_mean_squared_error: 0.2736
Epoch 25/30
1031/1032 [============================>.] - ETA: 0s - loss: 0.0564 - root_mean_squared_error: 0.2375
Epoch 00025: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0564 - root_mean_squared_error: 0.2374 - val_loss: 0.0766 - val_root_mean_squared_error: 0.2768
Epoch 26/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0558 - root_mean_squared_error: 0.2362
Epoch 00026: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0562 - root_mean_squared_error: 0.2370 - val_loss: 0.0744 - val_root_mean_squared_error: 0.2728
Epoch 27/30
1024/1032 [============================>.] - ETA: 0s - loss: 0.0562 - root_mean_squared_error: 0.2371
Epoch 00027: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0560 - root_mean_squared_error: 0.2367 - val_loss: 0.0758 - val_root_mean_squared_error: 0.2753
Epoch 28/30
1027/1032 [============================>.] - ETA: 0s - loss: 0.0561 - root_mean_squared_error: 0.2369
Epoch 00028: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0560 - root_mean_squared_error: 0.2366 - val_loss: 0.0744 - val_root_mean_squared_error: 0.2727
Epoch 29/30
1031/1032 [============================>.] - ETA: 0s - loss: 0.0560 - root_mean_squared_error: 0.2367
Epoch 00029: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 7ms/step - loss: 0.0560 - root_mean_squared_error: 0.2367 - val_loss: 0.0747 - val_root_mean_squared_error: 0.2733
Epoch 30/30
1032/1032 [==============================] - ETA: 0s - loss: 0.0556 - root_mean_squared_error: 0.2359
Epoch 00030: val_loss did not improve from 0.07029
1032/1032 [==============================] - 7s 6ms/step - loss: 0.0556 - root_mean_squared_error: 0.2359 - val_loss: 0.0753 - val_root_mean_squared_error: 0.2744
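# Aside: the loss-curve cell below is the camera5 one from earlier with only the title and the
# output filename changed, so it could be folded into a helper. Sketch only; plot_history is not
# defined anywhere in the original notebook.
def plot_history(history, camera_name, model_tag):
    n_epochs = len(history.history['loss'])
    plt.figure(figsize=(20, 8))
    plt.plot(history.history['loss'], label='Training Loss', color='#185fad')
    plt.plot(history.history['val_loss'], label='Testing Loss', color='orange')
    plt.title(f'Training and Testing Loss by Epoch for {camera_name}', fontsize=25)
    plt.xlabel('Epoch', fontsize=18)
    plt.ylabel('Mean Squared Error', fontsize=18)
    plt.xticks(range(n_epochs), range(1, n_epochs + 1))   # ticks always match the epoch count
    plt.legend(fontsize=18)
    plt.savefig(f'/content/drive/My Drive/images/train_test_loss_{model_tag}_{camera_name.lower()}.png')
# e.g. plot_history(history, 'Camera6', 'model5_8')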
# one tick per epoch; this fit also ran for 30 epochs, so the previous 0-100 tick range did not match the data
ticks = [i for i in range(30)]
labels = [i for i in range(1, 31)]_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera6', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_8_camera6.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera6 = model_history('model_5_8_camera6')_____no_output_____model_2_camera8.head()_____no_output_____#################### end of training camera6 data for model 3_____no_output_____########################### start of train with camera7 data for model 3_____no_output_____camera9 = load('/content/drive/My Drive/datasets/camera9_cleaned.npz')
log9 = pd.read_csv('/content/drive/My Drive/datasets/log9_cleaned.csv')_____no_output_____camera_processing(camera9, 'camera9')Done
log_processing(log9, 'log9')Done
train_split('camera9', 'log9')Done
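# The three preprocessing calls above are repeated for every new camera (the cell below keeps the
# same recipe as a reminder), so they could be bundled into one helper. Sketch only; prepare_camera
# is not defined in the original notebook and simply chains the existing functions.
def prepare_camera(camera_name, log_name):
    camera = load(f'/content/drive/My Drive/datasets/{camera_name}_cleaned.npz')
    log = pd.read_csv(f'/content/drive/My Drive/datasets/{log_name}_cleaned.csv')
    camera_processing(camera, camera_name)
    log_processing(log, log_name)
    train_split(camera_name, log_name)
# e.g. prepare_camera('camera9', 'log9') reproduces the three cells above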
"""
new data workflow
camera2 = load('/content/drive/My Drive/datasets/camera2_cleaned.npz')
log2 = pd.read_csv('/content/drive/My Drive/datasets/log2_cleaned.csv')
camera_processing(camera2, 'camera2')
log_processing(log2, 'log2')
train_split('camera2', 'log2')
"""_____no_output_____model = load_model('/content/drive/My Drive/epochs/model_5_8_camera6.0004-0.0703.h5')_____no_output_____def train_load(camera_file_name):
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/datasets/{camera_file_name}_y_test.npz" ./y_test.npz
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
return X_train, X_test, y_train, y_test_____no_output_____X_train, X_test, y_train, y_test = train_load('camera7')_____no_output_____X_train.shape, X_test.shape, y_train.shape, y_test.shape_____no_output_____filepath = "/content/drive/My Drive/epochs/model_5_9_camera7.{epoch:04d}-{val_loss:.4f}.h5"
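# Sketch, not in the original notebook: before fine-tuning on camera7, the reloaded camera6
# checkpoint can be scored on the fresh camera7 test split to give a baseline for the
# val_loss values reported during the fit below.
baseline_mse, baseline_rmse = model.evaluate(X_test, y_test, verbose=0)
print(baseline_mse, baseline_rmse)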
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Epoch 1/30
926/926 [==============================] - ETA: 0s - loss: 0.0390 - root_mean_squared_error: 0.1975
Epoch 00001: val_loss improved from inf to 0.03959, saving model to /content/drive/My Drive/epochs/model_5_9_camera7.0001-0.0396.h5
926/926 [==============================] - 6s 7ms/step - loss: 0.0390 - root_mean_squared_error: 0.1975 - val_loss: 0.0396 - val_root_mean_squared_error: 0.1990
Epoch 2/30
921/926 [============================>.] - ETA: 0s - loss: 0.0349 - root_mean_squared_error: 0.1869
Epoch 00002: val_loss improved from 0.03959 to 0.03933, saving model to /content/drive/My Drive/epochs/model_5_9_camera7.0002-0.0393.h5
926/926 [==============================] - 6s 7ms/step - loss: 0.0349 - root_mean_squared_error: 0.1869 - val_loss: 0.0393 - val_root_mean_squared_error: 0.1983
Epoch 3/30
920/926 [============================>.] - ETA: 0s - loss: 0.0340 - root_mean_squared_error: 0.1845
Epoch 00003: val_loss improved from 0.03933 to 0.03911, saving model to /content/drive/My Drive/epochs/model_5_9_camera7.0003-0.0391.h5
926/926 [==============================] - 6s 7ms/step - loss: 0.0341 - root_mean_squared_error: 0.1846 - val_loss: 0.0391 - val_root_mean_squared_error: 0.1978
Epoch 4/30
918/926 [============================>.] - ETA: 0s - loss: 0.0336 - root_mean_squared_error: 0.1832
Epoch 00004: val_loss improved from 0.03911 to 0.03889, saving model to /content/drive/My Drive/epochs/model_5_9_camera7.0004-0.0389.h5
926/926 [==============================] - 6s 7ms/step - loss: 0.0334 - root_mean_squared_error: 0.1827 - val_loss: 0.0389 - val_root_mean_squared_error: 0.1972
Epoch 5/30
920/926 [============================>.] - ETA: 0s - loss: 0.0326 - root_mean_squared_error: 0.1804
Epoch 00005: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0327 - root_mean_squared_error: 0.1808 - val_loss: 0.0391 - val_root_mean_squared_error: 0.1977
Epoch 6/30
923/926 [============================>.] - ETA: 0s - loss: 0.0323 - root_mean_squared_error: 0.1797
Epoch 00006: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0322 - root_mean_squared_error: 0.1795 - val_loss: 0.0395 - val_root_mean_squared_error: 0.1986
Epoch 7/30
923/926 [============================>.] - ETA: 0s - loss: 0.0319 - root_mean_squared_error: 0.1785
Epoch 00007: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0320 - root_mean_squared_error: 0.1788 - val_loss: 0.0392 - val_root_mean_squared_error: 0.1980
Epoch 8/30
924/926 [============================>.] - ETA: 0s - loss: 0.0312 - root_mean_squared_error: 0.1766
Epoch 00008: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0315 - root_mean_squared_error: 0.1776 - val_loss: 0.0396 - val_root_mean_squared_error: 0.1990
Epoch 9/30
926/926 [==============================] - ETA: 0s - loss: 0.0313 - root_mean_squared_error: 0.1769
Epoch 00009: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0313 - root_mean_squared_error: 0.1769 - val_loss: 0.0398 - val_root_mean_squared_error: 0.1996
Epoch 10/30
923/926 [============================>.] - ETA: 0s - loss: 0.0309 - root_mean_squared_error: 0.1759
Epoch 00010: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0309 - root_mean_squared_error: 0.1759 - val_loss: 0.0395 - val_root_mean_squared_error: 0.1988
Epoch 11/30
919/926 [============================>.] - ETA: 0s - loss: 0.0306 - root_mean_squared_error: 0.1749
Epoch 00011: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0307 - root_mean_squared_error: 0.1753 - val_loss: 0.0398 - val_root_mean_squared_error: 0.1995
Epoch 12/30
921/926 [============================>.] - ETA: 0s - loss: 0.0307 - root_mean_squared_error: 0.1752
Epoch 00012: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0306 - root_mean_squared_error: 0.1749 - val_loss: 0.0401 - val_root_mean_squared_error: 0.2002
Epoch 13/30
918/926 [============================>.] - ETA: 0s - loss: 0.0303 - root_mean_squared_error: 0.1741
Epoch 00013: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0302 - root_mean_squared_error: 0.1739 - val_loss: 0.0400 - val_root_mean_squared_error: 0.2000
Epoch 14/30
922/926 [============================>.] - ETA: 0s - loss: 0.0301 - root_mean_squared_error: 0.1735
Epoch 00014: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 6ms/step - loss: 0.0301 - root_mean_squared_error: 0.1734 - val_loss: 0.0402 - val_root_mean_squared_error: 0.2005
Epoch 15/30
922/926 [============================>.] - ETA: 0s - loss: 0.0299 - root_mean_squared_error: 0.1728
Epoch 00015: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0299 - root_mean_squared_error: 0.1728 - val_loss: 0.0405 - val_root_mean_squared_error: 0.2012
Epoch 16/30
917/926 [============================>.] - ETA: 0s - loss: 0.0297 - root_mean_squared_error: 0.1724
Epoch 00016: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0297 - root_mean_squared_error: 0.1722 - val_loss: 0.0407 - val_root_mean_squared_error: 0.2018
Epoch 17/30
925/926 [============================>.] - ETA: 0s - loss: 0.0295 - root_mean_squared_error: 0.1718
Epoch 00017: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0295 - root_mean_squared_error: 0.1718 - val_loss: 0.0405 - val_root_mean_squared_error: 0.2013
Epoch 18/30
922/926 [============================>.] - ETA: 0s - loss: 0.0291 - root_mean_squared_error: 0.1705
Epoch 00018: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0292 - root_mean_squared_error: 0.1708 - val_loss: 0.0410 - val_root_mean_squared_error: 0.2026
Epoch 19/30
925/926 [============================>.] - ETA: 0s - loss: 0.0292 - root_mean_squared_error: 0.1708
Epoch 00019: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0292 - root_mean_squared_error: 0.1707 - val_loss: 0.0411 - val_root_mean_squared_error: 0.2028
Epoch 20/30
918/926 [============================>.] - ETA: 0s - loss: 0.0290 - root_mean_squared_error: 0.1702
Epoch 00020: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0289 - root_mean_squared_error: 0.1700 - val_loss: 0.0417 - val_root_mean_squared_error: 0.2043
Epoch 21/30
919/926 [============================>.] - ETA: 0s - loss: 0.0289 - root_mean_squared_error: 0.1701
Epoch 00021: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 6ms/step - loss: 0.0288 - root_mean_squared_error: 0.1697 - val_loss: 0.0412 - val_root_mean_squared_error: 0.2031
Epoch 22/30
921/926 [============================>.] - ETA: 0s - loss: 0.0288 - root_mean_squared_error: 0.1696
Epoch 00022: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0288 - root_mean_squared_error: 0.1696 - val_loss: 0.0407 - val_root_mean_squared_error: 0.2018
Epoch 23/30
925/926 [============================>.] - ETA: 0s - loss: 0.0287 - root_mean_squared_error: 0.1693
Epoch 00023: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0286 - root_mean_squared_error: 0.1692 - val_loss: 0.0408 - val_root_mean_squared_error: 0.2019
Epoch 24/30
921/926 [============================>.] - ETA: 0s - loss: 0.0285 - root_mean_squared_error: 0.1688
Epoch 00024: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0285 - root_mean_squared_error: 0.1688 - val_loss: 0.0412 - val_root_mean_squared_error: 0.2030
Epoch 25/30
922/926 [============================>.] - ETA: 0s - loss: 0.0284 - root_mean_squared_error: 0.1684
Epoch 00025: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0284 - root_mean_squared_error: 0.1684 - val_loss: 0.0415 - val_root_mean_squared_error: 0.2037
Epoch 26/30
923/926 [============================>.] - ETA: 0s - loss: 0.0283 - root_mean_squared_error: 0.1682
Epoch 00026: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0283 - root_mean_squared_error: 0.1682 - val_loss: 0.0419 - val_root_mean_squared_error: 0.2046
Epoch 27/30
919/926 [============================>.] - ETA: 0s - loss: 0.0282 - root_mean_squared_error: 0.1680
Epoch 00027: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0282 - root_mean_squared_error: 0.1678 - val_loss: 0.0419 - val_root_mean_squared_error: 0.2046
Epoch 28/30
926/926 [==============================] - ETA: 0s - loss: 0.0280 - root_mean_squared_error: 0.1675
Epoch 00028: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0280 - root_mean_squared_error: 0.1675 - val_loss: 0.0423 - val_root_mean_squared_error: 0.2057
Epoch 29/30
921/926 [============================>.] - ETA: 0s - loss: 0.0279 - root_mean_squared_error: 0.1672
Epoch 00029: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0279 - root_mean_squared_error: 0.1671 - val_loss: 0.0417 - val_root_mean_squared_error: 0.2043
Epoch 30/30
918/926 [============================>.] - ETA: 0s - loss: 0.0278 - root_mean_squared_error: 0.1667
Epoch 00030: val_loss did not improve from 0.03889
926/926 [==============================] - 6s 7ms/step - loss: 0.0277 - root_mean_squared_error: 0.1666 - val_loss: 0.0432 - val_root_mean_squared_error: 0.2078
# one tick per epoch; this fit ran for 30 epochs, so the previous 0-100 tick range did not match the data
ticks = [i for i in range(30)]
labels = [i for i in range(1, 31)]_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera7', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model5_9_camera7.png');_____no_output_____def model_history(model_name):
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_3_camera7 = model_history('model_5_9_camera7')_____no_output_____model_2_camera9.head()_____no_output_____#################### end of training camera7 data for model 3_____no_output_____####################### testing new model to see if I'm actually training on the same model_____no_output_____model = Sequential()
model.add(Conv2D(16, (3, 3), input_shape=(80, 160, 1), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(300, activation='relu'))
model.add(Dropout(.5))
model.add(Dense(100, activation='relu'))
model.add(Dropout(.25))
model.add(Dense(20, activation='relu'))
model.add(Dense(1))
model.compile(loss='mse', optimizer=Adam(lr=1e-04), metrics=[RootMeanSquaredError()])
filepath = "/content/drive/My Drive/epochs/model_1_camera9_standalone.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____model.compile(loss='mse', optimizer=Adam(lr=1e-04), metrics=[RootMeanSquaredError()])_____no_output_____filepath = "/content/drive/My Drive/epochs/model_1_camera9_standalone.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=100,
verbose=1,
callbacks=callbacks_list)Epoch 1/100
595/599 [============================>.] - ETA: 0s - loss: 0.0560 - root_mean_squared_error: 0.2366
Epoch 00001: val_loss improved from inf to 0.05401, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0001-0.0540.h5
599/599 [==============================] - 8s 13ms/step - loss: 0.0557 - root_mean_squared_error: 0.2359 - val_loss: 0.0540 - val_root_mean_squared_error: 0.2324
Epoch 2/100
597/599 [============================>.] - ETA: 0s - loss: 0.0551 - root_mean_squared_error: 0.2347
Epoch 00002: val_loss improved from 0.05401 to 0.05400, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0002-0.0540.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0552 - root_mean_squared_error: 0.2349 - val_loss: 0.0540 - val_root_mean_squared_error: 0.2324
Epoch 3/100
594/599 [============================>.] - ETA: 0s - loss: 0.0552 - root_mean_squared_error: 0.2349
Epoch 00003: val_loss improved from 0.05400 to 0.05400, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0003-0.0540.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0551 - root_mean_squared_error: 0.2347 - val_loss: 0.0540 - val_root_mean_squared_error: 0.2324
Epoch 4/100
595/599 [============================>.] - ETA: 0s - loss: 0.0547 - root_mean_squared_error: 0.2339
Epoch 00004: val_loss improved from 0.05400 to 0.05388, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0004-0.0539.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0544 - root_mean_squared_error: 0.2332 - val_loss: 0.0539 - val_root_mean_squared_error: 0.2321
Epoch 5/100
597/599 [============================>.] - ETA: 0s - loss: 0.0506 - root_mean_squared_error: 0.2249
Epoch 00005: val_loss improved from 0.05388 to 0.05236, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0005-0.0524.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0504 - root_mean_squared_error: 0.2246 - val_loss: 0.0524 - val_root_mean_squared_error: 0.2288
Epoch 6/100
595/599 [============================>.] - ETA: 0s - loss: 0.0479 - root_mean_squared_error: 0.2189
Epoch 00006: val_loss improved from 0.05236 to 0.04944, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0006-0.0494.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0478 - root_mean_squared_error: 0.2185 - val_loss: 0.0494 - val_root_mean_squared_error: 0.2224
Epoch 7/100
598/599 [============================>.] - ETA: 0s - loss: 0.0451 - root_mean_squared_error: 0.2124
Epoch 00007: val_loss improved from 0.04944 to 0.04866, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0007-0.0487.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0451 - root_mean_squared_error: 0.2123 - val_loss: 0.0487 - val_root_mean_squared_error: 0.2206
Epoch 8/100
597/599 [============================>.] - ETA: 0s - loss: 0.0429 - root_mean_squared_error: 0.2072
Epoch 00008: val_loss improved from 0.04866 to 0.04707, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0008-0.0471.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0431 - root_mean_squared_error: 0.2076 - val_loss: 0.0471 - val_root_mean_squared_error: 0.2170
Epoch 9/100
594/599 [============================>.] - ETA: 0s - loss: 0.0422 - root_mean_squared_error: 0.2055
Epoch 00009: val_loss did not improve from 0.04707
599/599 [==============================] - 7s 12ms/step - loss: 0.0422 - root_mean_squared_error: 0.2054 - val_loss: 0.0484 - val_root_mean_squared_error: 0.2200
Epoch 10/100
598/599 [============================>.] - ETA: 0s - loss: 0.0390 - root_mean_squared_error: 0.1975
Epoch 00010: val_loss improved from 0.04707 to 0.04410, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0010-0.0441.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0390 - root_mean_squared_error: 0.1975 - val_loss: 0.0441 - val_root_mean_squared_error: 0.2100
Epoch 11/100
598/599 [============================>.] - ETA: 0s - loss: 0.0388 - root_mean_squared_error: 0.1970
Epoch 00011: val_loss improved from 0.04410 to 0.04318, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0011-0.0432.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0388 - root_mean_squared_error: 0.1969 - val_loss: 0.0432 - val_root_mean_squared_error: 0.2078
Epoch 12/100
596/599 [============================>.] - ETA: 0s - loss: 0.0351 - root_mean_squared_error: 0.1873
Epoch 00012: val_loss improved from 0.04318 to 0.03872, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0012-0.0387.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0351 - root_mean_squared_error: 0.1873 - val_loss: 0.0387 - val_root_mean_squared_error: 0.1968
Epoch 13/100
595/599 [============================>.] - ETA: 0s - loss: 0.0337 - root_mean_squared_error: 0.1836
Epoch 00013: val_loss did not improve from 0.03872
599/599 [==============================] - 7s 12ms/step - loss: 0.0344 - root_mean_squared_error: 0.1854 - val_loss: 0.0426 - val_root_mean_squared_error: 0.2064
Epoch 14/100
597/599 [============================>.] - ETA: 0s - loss: 0.0306 - root_mean_squared_error: 0.1749
Epoch 00014: val_loss improved from 0.03872 to 0.03488, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0014-0.0349.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0308 - root_mean_squared_error: 0.1754 - val_loss: 0.0349 - val_root_mean_squared_error: 0.1868
Epoch 15/100
599/599 [==============================] - ETA: 0s - loss: 0.0291 - root_mean_squared_error: 0.1707
Epoch 00015: val_loss improved from 0.03488 to 0.02948, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0015-0.0295.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0291 - root_mean_squared_error: 0.1707 - val_loss: 0.0295 - val_root_mean_squared_error: 0.1717
Epoch 16/100
596/599 [============================>.] - ETA: 0s - loss: 0.0264 - root_mean_squared_error: 0.1625
Epoch 00016: val_loss improved from 0.02948 to 0.02898, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0016-0.0290.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0265 - root_mean_squared_error: 0.1629 - val_loss: 0.0290 - val_root_mean_squared_error: 0.1702
Epoch 17/100
595/599 [============================>.] - ETA: 0s - loss: 0.0257 - root_mean_squared_error: 0.1602
Epoch 00017: val_loss improved from 0.02898 to 0.02874, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0017-0.0287.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0256 - root_mean_squared_error: 0.1600 - val_loss: 0.0287 - val_root_mean_squared_error: 0.1695
Epoch 18/100
598/599 [============================>.] - ETA: 0s - loss: 0.0219 - root_mean_squared_error: 0.1480
Epoch 00018: val_loss improved from 0.02874 to 0.02825, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0018-0.0282.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0219 - root_mean_squared_error: 0.1481 - val_loss: 0.0282 - val_root_mean_squared_error: 0.1681
Epoch 19/100
595/599 [============================>.] - ETA: 0s - loss: 0.0212 - root_mean_squared_error: 0.1455
Epoch 00019: val_loss improved from 0.02825 to 0.02553, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0019-0.0255.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0211 - root_mean_squared_error: 0.1452 - val_loss: 0.0255 - val_root_mean_squared_error: 0.1598
Epoch 20/100
595/599 [============================>.] - ETA: 0s - loss: 0.0213 - root_mean_squared_error: 0.1460
Epoch 00020: val_loss did not improve from 0.02553
599/599 [==============================] - 7s 12ms/step - loss: 0.0212 - root_mean_squared_error: 0.1457 - val_loss: 0.0260 - val_root_mean_squared_error: 0.1612
Epoch 21/100
597/599 [============================>.] - ETA: 0s - loss: 0.0194 - root_mean_squared_error: 0.1395
Epoch 00021: val_loss improved from 0.02553 to 0.02488, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0021-0.0249.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0194 - root_mean_squared_error: 0.1394 - val_loss: 0.0249 - val_root_mean_squared_error: 0.1577
Epoch 22/100
595/599 [============================>.] - ETA: 0s - loss: 0.0176 - root_mean_squared_error: 0.1326
Epoch 00022: val_loss improved from 0.02488 to 0.02432, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0022-0.0243.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0175 - root_mean_squared_error: 0.1322 - val_loss: 0.0243 - val_root_mean_squared_error: 0.1560
Epoch 23/100
599/599 [==============================] - ETA: 0s - loss: 0.0174 - root_mean_squared_error: 0.1320
Epoch 00023: val_loss improved from 0.02432 to 0.02417, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0023-0.0242.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0174 - root_mean_squared_error: 0.1320 - val_loss: 0.0242 - val_root_mean_squared_error: 0.1555
Epoch 24/100
597/599 [============================>.] - ETA: 0s - loss: 0.0174 - root_mean_squared_error: 0.1320
Epoch 00024: val_loss improved from 0.02417 to 0.02046, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0024-0.0205.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0174 - root_mean_squared_error: 0.1319 - val_loss: 0.0205 - val_root_mean_squared_error: 0.1430
Epoch 25/100
595/599 [============================>.] - ETA: 0s - loss: 0.0176 - root_mean_squared_error: 0.1326
Epoch 00025: val_loss did not improve from 0.02046
599/599 [==============================] - 7s 12ms/step - loss: 0.0175 - root_mean_squared_error: 0.1323 - val_loss: 0.0209 - val_root_mean_squared_error: 0.1444
Epoch 26/100
596/599 [============================>.] - ETA: 0s - loss: 0.0161 - root_mean_squared_error: 0.1267
Epoch 00026: val_loss improved from 0.02046 to 0.01855, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0026-0.0185.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0160 - root_mean_squared_error: 0.1266 - val_loss: 0.0185 - val_root_mean_squared_error: 0.1362
Epoch 27/100
599/599 [==============================] - ETA: 0s - loss: 0.0164 - root_mean_squared_error: 0.1279
Epoch 00027: val_loss improved from 0.01855 to 0.01844, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0027-0.0184.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0164 - root_mean_squared_error: 0.1279 - val_loss: 0.0184 - val_root_mean_squared_error: 0.1358
Epoch 28/100
599/599 [==============================] - ETA: 0s - loss: 0.0144 - root_mean_squared_error: 0.1200
Epoch 00028: val_loss improved from 0.01844 to 0.01764, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0028-0.0176.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0144 - root_mean_squared_error: 0.1200 - val_loss: 0.0176 - val_root_mean_squared_error: 0.1328
Epoch 29/100
597/599 [============================>.] - ETA: 0s - loss: 0.0138 - root_mean_squared_error: 0.1175
Epoch 00029: val_loss did not improve from 0.01764
599/599 [==============================] - 7s 12ms/step - loss: 0.0138 - root_mean_squared_error: 0.1174 - val_loss: 0.0180 - val_root_mean_squared_error: 0.1343
Epoch 30/100
596/599 [============================>.] - ETA: 0s - loss: 0.0136 - root_mean_squared_error: 0.1167
Epoch 00030: val_loss did not improve from 0.01764
599/599 [==============================] - 7s 12ms/step - loss: 0.0136 - root_mean_squared_error: 0.1166 - val_loss: 0.0182 - val_root_mean_squared_error: 0.1351
Epoch 31/100
597/599 [============================>.] - ETA: 0s - loss: 0.0130 - root_mean_squared_error: 0.1140
Epoch 00031: val_loss did not improve from 0.01764
599/599 [==============================] - 7s 12ms/step - loss: 0.0133 - root_mean_squared_error: 0.1152 - val_loss: 0.0184 - val_root_mean_squared_error: 0.1355
Epoch 32/100
596/599 [============================>.] - ETA: 0s - loss: 0.0128 - root_mean_squared_error: 0.1133
Epoch 00032: val_loss improved from 0.01764 to 0.01658, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0032-0.0166.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0129 - root_mean_squared_error: 0.1135 - val_loss: 0.0166 - val_root_mean_squared_error: 0.1288
Epoch 33/100
594/599 [============================>.] - ETA: 0s - loss: 0.0137 - root_mean_squared_error: 0.1171
Epoch 00033: val_loss improved from 0.01658 to 0.01639, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0033-0.0164.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0138 - root_mean_squared_error: 0.1175 - val_loss: 0.0164 - val_root_mean_squared_error: 0.1280
Epoch 34/100
598/599 [============================>.] - ETA: 0s - loss: 0.0130 - root_mean_squared_error: 0.1139
Epoch 00034: val_loss improved from 0.01639 to 0.01585, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0034-0.0159.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0130 - root_mean_squared_error: 0.1139 - val_loss: 0.0159 - val_root_mean_squared_error: 0.1259
Epoch 35/100
595/599 [============================>.] - ETA: 0s - loss: 0.0125 - root_mean_squared_error: 0.1118
Epoch 00035: val_loss did not improve from 0.01585
599/599 [==============================] - 7s 12ms/step - loss: 0.0125 - root_mean_squared_error: 0.1118 - val_loss: 0.0166 - val_root_mean_squared_error: 0.1290
Epoch 36/100
598/599 [============================>.] - ETA: 0s - loss: 0.0125 - root_mean_squared_error: 0.1116
Epoch 00036: val_loss did not improve from 0.01585
599/599 [==============================] - 7s 12ms/step - loss: 0.0125 - root_mean_squared_error: 0.1116 - val_loss: 0.0163 - val_root_mean_squared_error: 0.1277
Epoch 37/100
599/599 [==============================] - ETA: 0s - loss: 0.0127 - root_mean_squared_error: 0.1128
Epoch 00037: val_loss did not improve from 0.01585
599/599 [==============================] - 7s 12ms/step - loss: 0.0127 - root_mean_squared_error: 0.1128 - val_loss: 0.0163 - val_root_mean_squared_error: 0.1275
Epoch 38/100
598/599 [============================>.] - ETA: 0s - loss: 0.0111 - root_mean_squared_error: 0.1054
Epoch 00038: val_loss improved from 0.01585 to 0.01574, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0038-0.0157.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0111 - root_mean_squared_error: 0.1053 - val_loss: 0.0157 - val_root_mean_squared_error: 0.1254
Epoch 39/100
597/599 [============================>.] - ETA: 0s - loss: 0.0112 - root_mean_squared_error: 0.1058
Epoch 00039: val_loss did not improve from 0.01574
599/599 [==============================] - 7s 12ms/step - loss: 0.0112 - root_mean_squared_error: 0.1057 - val_loss: 0.0163 - val_root_mean_squared_error: 0.1275
Epoch 40/100
596/599 [============================>.] - ETA: 0s - loss: 0.0115 - root_mean_squared_error: 0.1075
Epoch 00040: val_loss did not improve from 0.01574
599/599 [==============================] - 7s 12ms/step - loss: 0.0115 - root_mean_squared_error: 0.1073 - val_loss: 0.0168 - val_root_mean_squared_error: 0.1295
Epoch 41/100
595/599 [============================>.] - ETA: 0s - loss: 0.0117 - root_mean_squared_error: 0.1081
Epoch 00041: val_loss did not improve from 0.01574
599/599 [==============================] - 7s 12ms/step - loss: 0.0117 - root_mean_squared_error: 0.1080 - val_loss: 0.0163 - val_root_mean_squared_error: 0.1275
Epoch 42/100
596/599 [============================>.] - ETA: 0s - loss: 0.0111 - root_mean_squared_error: 0.1051
Epoch 00042: val_loss improved from 0.01574 to 0.01504, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0042-0.0150.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0111 - root_mean_squared_error: 0.1056 - val_loss: 0.0150 - val_root_mean_squared_error: 0.1226
Epoch 43/100
597/599 [============================>.] - ETA: 0s - loss: 0.0105 - root_mean_squared_error: 0.1025
Epoch 00043: val_loss did not improve from 0.01504
599/599 [==============================] - 7s 12ms/step - loss: 0.0105 - root_mean_squared_error: 0.1024 - val_loss: 0.0177 - val_root_mean_squared_error: 0.1329
Epoch 44/100
597/599 [============================>.] - ETA: 0s - loss: 0.0104 - root_mean_squared_error: 0.1018
Epoch 00044: val_loss improved from 0.01504 to 0.01477, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0044-0.0148.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0104 - root_mean_squared_error: 0.1020 - val_loss: 0.0148 - val_root_mean_squared_error: 0.1215
Epoch 45/100
598/599 [============================>.] - ETA: 0s - loss: 0.0104 - root_mean_squared_error: 0.1021
Epoch 00045: val_loss improved from 0.01477 to 0.01433, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0045-0.0143.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0104 - root_mean_squared_error: 0.1021 - val_loss: 0.0143 - val_root_mean_squared_error: 0.1197
Epoch 46/100
596/599 [============================>.] - ETA: 0s - loss: 0.0100 - root_mean_squared_error: 0.1000
Epoch 00046: val_loss did not improve from 0.01433
599/599 [==============================] - 7s 12ms/step - loss: 0.0100 - root_mean_squared_error: 0.1001 - val_loss: 0.0146 - val_root_mean_squared_error: 0.1207
Epoch 47/100
597/599 [============================>.] - ETA: 0s - loss: 0.0103 - root_mean_squared_error: 0.1016
Epoch 00047: val_loss did not improve from 0.01433
599/599 [==============================] - 7s 12ms/step - loss: 0.0103 - root_mean_squared_error: 0.1016 - val_loss: 0.0160 - val_root_mean_squared_error: 0.1263
Epoch 48/100
598/599 [============================>.] - ETA: 0s - loss: 0.0094 - root_mean_squared_error: 0.0972
Epoch 00048: val_loss improved from 0.01433 to 0.01258, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0048-0.0126.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0095 - root_mean_squared_error: 0.0974 - val_loss: 0.0126 - val_root_mean_squared_error: 0.1122
Epoch 49/100
595/599 [============================>.] - ETA: 0s - loss: 0.0094 - root_mean_squared_error: 0.0972
Epoch 00049: val_loss did not improve from 0.01258
599/599 [==============================] - 7s 12ms/step - loss: 0.0094 - root_mean_squared_error: 0.0970 - val_loss: 0.0150 - val_root_mean_squared_error: 0.1224
Epoch 50/100
597/599 [============================>.] - ETA: 0s - loss: 0.0094 - root_mean_squared_error: 0.0968
Epoch 00050: val_loss did not improve from 0.01258
599/599 [==============================] - 7s 12ms/step - loss: 0.0094 - root_mean_squared_error: 0.0969 - val_loss: 0.0131 - val_root_mean_squared_error: 0.1145
Epoch 51/100
597/599 [============================>.] - ETA: 0s - loss: 0.0097 - root_mean_squared_error: 0.0983
Epoch 00051: val_loss did not improve from 0.01258
599/599 [==============================] - 7s 12ms/step - loss: 0.0096 - root_mean_squared_error: 0.0982 - val_loss: 0.0143 - val_root_mean_squared_error: 0.1197
Epoch 52/100
598/599 [============================>.] - ETA: 0s - loss: 0.0093 - root_mean_squared_error: 0.0965
Epoch 00052: val_loss did not improve from 0.01258
599/599 [==============================] - 7s 12ms/step - loss: 0.0093 - root_mean_squared_error: 0.0964 - val_loss: 0.0134 - val_root_mean_squared_error: 0.1158
Epoch 53/100
599/599 [==============================] - ETA: 0s - loss: 0.0098 - root_mean_squared_error: 0.0989
Epoch 00053: val_loss did not improve from 0.01258
599/599 [==============================] - 7s 12ms/step - loss: 0.0098 - root_mean_squared_error: 0.0989 - val_loss: 0.0126 - val_root_mean_squared_error: 0.1125
Epoch 54/100
598/599 [============================>.] - ETA: 0s - loss: 0.0087 - root_mean_squared_error: 0.0933
Epoch 00054: val_loss did not improve from 0.01258
599/599 [==============================] - 7s 12ms/step - loss: 0.0087 - root_mean_squared_error: 0.0933 - val_loss: 0.0143 - val_root_mean_squared_error: 0.1196
Epoch 55/100
598/599 [============================>.] - ETA: 0s - loss: 0.0084 - root_mean_squared_error: 0.0917
Epoch 00055: val_loss improved from 0.01258 to 0.01195, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0055-0.0120.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0084 - root_mean_squared_error: 0.0917 - val_loss: 0.0120 - val_root_mean_squared_error: 0.1093
Epoch 56/100
596/599 [============================>.] - ETA: 0s - loss: 0.0092 - root_mean_squared_error: 0.0961
Epoch 00056: val_loss did not improve from 0.01195
599/599 [==============================] - 7s 12ms/step - loss: 0.0092 - root_mean_squared_error: 0.0960 - val_loss: 0.0147 - val_root_mean_squared_error: 0.1213
Epoch 57/100
597/599 [============================>.] - ETA: 0s - loss: 0.0085 - root_mean_squared_error: 0.0920
Epoch 00057: val_loss did not improve from 0.01195
599/599 [==============================] - 7s 12ms/step - loss: 0.0084 - root_mean_squared_error: 0.0919 - val_loss: 0.0123 - val_root_mean_squared_error: 0.1111
Epoch 58/100
596/599 [============================>.] - ETA: 0s - loss: 0.0083 - root_mean_squared_error: 0.0911
Epoch 00058: val_loss did not improve from 0.01195
599/599 [==============================] - 7s 12ms/step - loss: 0.0083 - root_mean_squared_error: 0.0910 - val_loss: 0.0124 - val_root_mean_squared_error: 0.1112
Epoch 59/100
594/599 [============================>.] - ETA: 0s - loss: 0.0088 - root_mean_squared_error: 0.0939
Epoch 00059: val_loss did not improve from 0.01195
599/599 [==============================] - 7s 12ms/step - loss: 0.0088 - root_mean_squared_error: 0.0937 - val_loss: 0.0134 - val_root_mean_squared_error: 0.1157
Epoch 60/100
598/599 [============================>.] - ETA: 0s - loss: 0.0087 - root_mean_squared_error: 0.0930
Epoch 00060: val_loss did not improve from 0.01195
599/599 [==============================] - 7s 12ms/step - loss: 0.0086 - root_mean_squared_error: 0.0930 - val_loss: 0.0139 - val_root_mean_squared_error: 0.1177
Epoch 61/100
597/599 [============================>.] - ETA: 0s - loss: 0.0084 - root_mean_squared_error: 0.0919
Epoch 00061: val_loss improved from 0.01195 to 0.01176, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0061-0.0118.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0084 - root_mean_squared_error: 0.0918 - val_loss: 0.0118 - val_root_mean_squared_error: 0.1085
Epoch 62/100
597/599 [============================>.] - ETA: 0s - loss: 0.0082 - root_mean_squared_error: 0.0907
Epoch 00062: val_loss did not improve from 0.01176
599/599 [==============================] - 7s 12ms/step - loss: 0.0082 - root_mean_squared_error: 0.0907 - val_loss: 0.0133 - val_root_mean_squared_error: 0.1152
Epoch 63/100
596/599 [============================>.] - ETA: 0s - loss: 0.0081 - root_mean_squared_error: 0.0901
Epoch 00063: val_loss improved from 0.01176 to 0.01075, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0063-0.0107.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0081 - root_mean_squared_error: 0.0901 - val_loss: 0.0107 - val_root_mean_squared_error: 0.1037
Epoch 64/100
595/599 [============================>.] - ETA: 0s - loss: 0.0081 - root_mean_squared_error: 0.0901
Epoch 00064: val_loss did not improve from 0.01075
599/599 [==============================] - 7s 12ms/step - loss: 0.0081 - root_mean_squared_error: 0.0899 - val_loss: 0.0126 - val_root_mean_squared_error: 0.1123
Epoch 65/100
595/599 [============================>.] - ETA: 0s - loss: 0.0076 - root_mean_squared_error: 0.0871
Epoch 00065: val_loss did not improve from 0.01075
599/599 [==============================] - 7s 12ms/step - loss: 0.0076 - root_mean_squared_error: 0.0870 - val_loss: 0.0115 - val_root_mean_squared_error: 0.1070
Epoch 66/100
595/599 [============================>.] - ETA: 0s - loss: 0.0081 - root_mean_squared_error: 0.0900
Epoch 00066: val_loss did not improve from 0.01075
599/599 [==============================] - 7s 12ms/step - loss: 0.0081 - root_mean_squared_error: 0.0900 - val_loss: 0.0122 - val_root_mean_squared_error: 0.1104
Epoch 67/100
595/599 [============================>.] - ETA: 0s - loss: 0.0077 - root_mean_squared_error: 0.0877
Epoch 00067: val_loss did not improve from 0.01075
599/599 [==============================] - 7s 12ms/step - loss: 0.0077 - root_mean_squared_error: 0.0877 - val_loss: 0.0122 - val_root_mean_squared_error: 0.1102
Epoch 68/100
598/599 [============================>.] - ETA: 0s - loss: 0.0080 - root_mean_squared_error: 0.0896
Epoch 00068: val_loss did not improve from 0.01075
599/599 [==============================] - 7s 12ms/step - loss: 0.0080 - root_mean_squared_error: 0.0896 - val_loss: 0.0135 - val_root_mean_squared_error: 0.1162
Epoch 69/100
599/599 [==============================] - ETA: 0s - loss: 0.0073 - root_mean_squared_error: 0.0856
Epoch 00069: val_loss improved from 0.01075 to 0.01010, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0069-0.0101.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0073 - root_mean_squared_error: 0.0856 - val_loss: 0.0101 - val_root_mean_squared_error: 0.1005
Epoch 70/100
595/599 [============================>.] - ETA: 0s - loss: 0.0070 - root_mean_squared_error: 0.0839
Epoch 00070: val_loss did not improve from 0.01010
599/599 [==============================] - 7s 12ms/step - loss: 0.0071 - root_mean_squared_error: 0.0843 - val_loss: 0.0104 - val_root_mean_squared_error: 0.1021
Epoch 71/100
595/599 [============================>.] - ETA: 0s - loss: 0.0080 - root_mean_squared_error: 0.0895
Epoch 00071: val_loss did not improve from 0.01010
599/599 [==============================] - 7s 12ms/step - loss: 0.0080 - root_mean_squared_error: 0.0893 - val_loss: 0.0123 - val_root_mean_squared_error: 0.1110
Epoch 72/100
596/599 [============================>.] - ETA: 0s - loss: 0.0074 - root_mean_squared_error: 0.0863
Epoch 00072: val_loss did not improve from 0.01010
599/599 [==============================] - 7s 12ms/step - loss: 0.0074 - root_mean_squared_error: 0.0862 - val_loss: 0.0124 - val_root_mean_squared_error: 0.1112
Epoch 73/100
595/599 [============================>.] - ETA: 0s - loss: 0.0071 - root_mean_squared_error: 0.0845
Epoch 00073: val_loss did not improve from 0.01010
599/599 [==============================] - 7s 12ms/step - loss: 0.0072 - root_mean_squared_error: 0.0847 - val_loss: 0.0115 - val_root_mean_squared_error: 0.1075
Epoch 74/100
599/599 [==============================] - ETA: 0s - loss: 0.0072 - root_mean_squared_error: 0.0850
Epoch 00074: val_loss did not improve from 0.01010
599/599 [==============================] - 7s 12ms/step - loss: 0.0072 - root_mean_squared_error: 0.0850 - val_loss: 0.0133 - val_root_mean_squared_error: 0.1152
Epoch 75/100
596/599 [============================>.] - ETA: 0s - loss: 0.0069 - root_mean_squared_error: 0.0833
Epoch 00075: val_loss did not improve from 0.01010
599/599 [==============================] - 7s 12ms/step - loss: 0.0070 - root_mean_squared_error: 0.0834 - val_loss: 0.0116 - val_root_mean_squared_error: 0.1077
Epoch 76/100
599/599 [==============================] - ETA: 0s - loss: 0.0071 - root_mean_squared_error: 0.0841
Epoch 00076: val_loss improved from 0.01010 to 0.00983, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0076-0.0098.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0071 - root_mean_squared_error: 0.0841 - val_loss: 0.0098 - val_root_mean_squared_error: 0.0992
Epoch 77/100
596/599 [============================>.] - ETA: 0s - loss: 0.0066 - root_mean_squared_error: 0.0813
Epoch 00077: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 12ms/step - loss: 0.0066 - root_mean_squared_error: 0.0813 - val_loss: 0.0120 - val_root_mean_squared_error: 0.1094
Epoch 78/100
598/599 [============================>.] - ETA: 0s - loss: 0.0072 - root_mean_squared_error: 0.0850
Epoch 00078: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 12ms/step - loss: 0.0072 - root_mean_squared_error: 0.0850 - val_loss: 0.0105 - val_root_mean_squared_error: 0.1024
Epoch 79/100
595/599 [============================>.] - ETA: 0s - loss: 0.0078 - root_mean_squared_error: 0.0885
Epoch 00079: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 12ms/step - loss: 0.0078 - root_mean_squared_error: 0.0883 - val_loss: 0.0109 - val_root_mean_squared_error: 0.1045
Epoch 80/100
596/599 [============================>.] - ETA: 0s - loss: 0.0064 - root_mean_squared_error: 0.0798
Epoch 00080: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 12ms/step - loss: 0.0064 - root_mean_squared_error: 0.0803 - val_loss: 0.0109 - val_root_mean_squared_error: 0.1043
Epoch 81/100
598/599 [============================>.] - ETA: 0s - loss: 0.0062 - root_mean_squared_error: 0.0789
Epoch 00081: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 12ms/step - loss: 0.0062 - root_mean_squared_error: 0.0789 - val_loss: 0.0111 - val_root_mean_squared_error: 0.1051
Epoch 82/100
596/599 [============================>.] - ETA: 0s - loss: 0.0067 - root_mean_squared_error: 0.0819
Epoch 00082: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 12ms/step - loss: 0.0067 - root_mean_squared_error: 0.0817 - val_loss: 0.0110 - val_root_mean_squared_error: 0.1050
Epoch 83/100
594/599 [============================>.] - ETA: 0s - loss: 0.0066 - root_mean_squared_error: 0.0814
Epoch 00083: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 11ms/step - loss: 0.0066 - root_mean_squared_error: 0.0812 - val_loss: 0.0104 - val_root_mean_squared_error: 0.1019
Epoch 84/100
596/599 [============================>.] - ETA: 0s - loss: 0.0070 - root_mean_squared_error: 0.0838
Epoch 00084: val_loss did not improve from 0.00983
599/599 [==============================] - 7s 11ms/step - loss: 0.0071 - root_mean_squared_error: 0.0840 - val_loss: 0.0116 - val_root_mean_squared_error: 0.1078
Epoch 85/100
595/599 [============================>.] - ETA: 0s - loss: 0.0060 - root_mean_squared_error: 0.0773
Epoch 00085: val_loss improved from 0.00983 to 0.00980, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0085-0.0098.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0060 - root_mean_squared_error: 0.0774 - val_loss: 0.0098 - val_root_mean_squared_error: 0.0990
Epoch 86/100
595/599 [============================>.] - ETA: 0s - loss: 0.0064 - root_mean_squared_error: 0.0797
Epoch 00086: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0063 - root_mean_squared_error: 0.0797 - val_loss: 0.0119 - val_root_mean_squared_error: 0.1093
Epoch 87/100
596/599 [============================>.] - ETA: 0s - loss: 0.0061 - root_mean_squared_error: 0.0782
Epoch 00087: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0061 - root_mean_squared_error: 0.0782 - val_loss: 0.0106 - val_root_mean_squared_error: 0.1029
Epoch 88/100
597/599 [============================>.] - ETA: 0s - loss: 0.0067 - root_mean_squared_error: 0.0816
Epoch 00088: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0067 - root_mean_squared_error: 0.0818 - val_loss: 0.0102 - val_root_mean_squared_error: 0.1011
Epoch 89/100
597/599 [============================>.] - ETA: 0s - loss: 0.0060 - root_mean_squared_error: 0.0772
Epoch 00089: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0060 - root_mean_squared_error: 0.0772 - val_loss: 0.0102 - val_root_mean_squared_error: 0.1012
Epoch 90/100
595/599 [============================>.] - ETA: 0s - loss: 0.0060 - root_mean_squared_error: 0.0772
Epoch 00090: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0060 - root_mean_squared_error: 0.0772 - val_loss: 0.0118 - val_root_mean_squared_error: 0.1085
Epoch 91/100
594/599 [============================>.] - ETA: 0s - loss: 0.0060 - root_mean_squared_error: 0.0776
Epoch 00091: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0060 - root_mean_squared_error: 0.0774 - val_loss: 0.0108 - val_root_mean_squared_error: 0.1037
Epoch 92/100
596/599 [============================>.] - ETA: 0s - loss: 0.0060 - root_mean_squared_error: 0.0773
Epoch 00092: val_loss did not improve from 0.00980
599/599 [==============================] - 7s 12ms/step - loss: 0.0060 - root_mean_squared_error: 0.0774 - val_loss: 0.0114 - val_root_mean_squared_error: 0.1066
Epoch 93/100
598/599 [============================>.] - ETA: 0s - loss: 0.0057 - root_mean_squared_error: 0.0757
Epoch 00093: val_loss improved from 0.00980 to 0.00964, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0093-0.0096.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0057 - root_mean_squared_error: 0.0757 - val_loss: 0.0096 - val_root_mean_squared_error: 0.0982
Epoch 94/100
597/599 [============================>.] - ETA: 0s - loss: 0.0061 - root_mean_squared_error: 0.0784
Epoch 00094: val_loss did not improve from 0.00964
599/599 [==============================] - 7s 12ms/step - loss: 0.0061 - root_mean_squared_error: 0.0783 - val_loss: 0.0105 - val_root_mean_squared_error: 0.1023
Epoch 95/100
597/599 [============================>.] - ETA: 0s - loss: 0.0061 - root_mean_squared_error: 0.0780
Epoch 00095: val_loss did not improve from 0.00964
599/599 [==============================] - 7s 12ms/step - loss: 0.0061 - root_mean_squared_error: 0.0781 - val_loss: 0.0102 - val_root_mean_squared_error: 0.1010
Epoch 96/100
595/599 [============================>.] - ETA: 0s - loss: 0.0062 - root_mean_squared_error: 0.0790
Epoch 00096: val_loss did not improve from 0.00964
599/599 [==============================] - 7s 12ms/step - loss: 0.0062 - root_mean_squared_error: 0.0788 - val_loss: 0.0102 - val_root_mean_squared_error: 0.1009
Epoch 97/100
595/599 [============================>.] - ETA: 0s - loss: 0.0057 - root_mean_squared_error: 0.0752
Epoch 00097: val_loss did not improve from 0.00964
599/599 [==============================] - 7s 12ms/step - loss: 0.0056 - root_mean_squared_error: 0.0751 - val_loss: 0.0101 - val_root_mean_squared_error: 0.1007
Epoch 98/100
596/599 [============================>.] - ETA: 0s - loss: 0.0058 - root_mean_squared_error: 0.0761
Epoch 00098: val_loss improved from 0.00964 to 0.00963, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0098-0.0096.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0059 - root_mean_squared_error: 0.0765 - val_loss: 0.0096 - val_root_mean_squared_error: 0.0981
Epoch 99/100
599/599 [==============================] - ETA: 0s - loss: 0.0059 - root_mean_squared_error: 0.0771
Epoch 00099: val_loss improved from 0.00963 to 0.00910, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0099-0.0091.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0059 - root_mean_squared_error: 0.0771 - val_loss: 0.0091 - val_root_mean_squared_error: 0.0954
Epoch 100/100
595/599 [============================>.] - ETA: 0s - loss: 0.0059 - root_mean_squared_error: 0.0766
Epoch 00100: val_loss improved from 0.00910 to 0.00858, saving model to /content/drive/My Drive/epochs/model_1_camera9_standalone.0100-0.0086.h5
599/599 [==============================] - 7s 12ms/step - loss: 0.0059 - root_mean_squared_error: 0.0767 - val_loss: 0.0086 - val_root_mean_squared_error: 0.0926
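The checkpoints written above keep only the weights that improved val_loss, so the best camera9 model can be restored straight from Drive. A minimal sketch, assuming tensorflow.keras is the Keras in use here and that `X_test`/`y_test` are still in memory (the filename is the last checkpoint this run reported saving; adjust it to whatever the run actually produced):

from tensorflow.keras.models import load_model

# Point this at whichever checkpoint file the run above actually produced.
best_path = '/content/drive/My Drive/epochs/model_1_camera9_standalone.0100-0.0086.h5'
best_model = load_model(best_path)

# evaluate() returns the loss followed by the metrics given to compile(), here MSE and RMSE.
val_loss, val_rmse = best_model.evaluate(X_test, y_test, verbose=0)
print(f'restored checkpoint -> val_loss: {val_loss:.4f}, val RMSE: {val_rmse:.4f}')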
# x-axis ticks every 10 epochs, with the first tick relabelled as epoch 1 below
ticks = list(range(0, 101, 10))
labels = list(range(0, 101, 10))
labels[0] = 1_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera9', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18)
plt.savefig('/content/drive/My Drive/images/train_test_loss_model1_camera9_standalone.png');_____no_output_____def model_history(model_name):
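    # Collect the four history curves of the current run into a DataFrame and persist it to Drive as <model_name>.csv.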
model = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
model.to_csv(f'/content/drive/My Drive/datasets/{model_name}.csv', index=False)
return model_____no_output_____model_1_camera9 = model_history('model_1_camera9_standalone')_____no_output_____model_1_camera9.head()_____no_output_____#################### end of training camera9 data for model 1_____no_output_____model = Sequential()
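# Model 1: three Conv2D/MaxPooling blocks feeding a 300-100-20 dense head (dropout in the dense layers only),
# with a single linear unit for the regression target, trained with MSE and Adam.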
model.add(Conv2D(16, (3, 3), input_shape=(80, 160, 1), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(300, activation='relu'))
model.add(Dropout(.5))
model.add(Dense(100, activation='relu'))
model.add(Dropout(.25))
model.add(Dense(20, activation='relu'))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam', metrics=[RootMeanSquaredError()])
from keras.callbacks import *
filepath = "/content/drive/My Drive/model_1_shuffled_redropout.{epoch:03d}-{val_loss:.3f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')  # val_loss improves downward, so mode must be 'min'
callbacks_list = [checkpoint]
history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=15,
verbose=1,
callbacks=callbacks_list)_____no_output_____model = Sequential()
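# Same architecture as model 1, but with Dropout(0.25) after each pooling block so the convolutional
# features are regularised as well as the dense head.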
model.add(Conv2D(16, (3, 3), input_shape=(80, 160, 1), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(300, activation='relu'))
model.add(Dropout(.5))
model.add(Dense(100, activation='relu'))
model.add(Dropout(.25))
model.add(Dense(20, activation='relu'))
model.add(Dense(1))_____no_output_____model.compile(loss='mse', optimizer=Adam(lr=1e-04), metrics=[RootMeanSquaredError()])_____no_output_____from keras.callbacks import *
filepath = "/content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.{epoch:04d}-{val_loss:.4f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]_____no_output_____history = model.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=100,
verbose=1,
callbacks=callbacks_list)Epoch 1/100
637/641 [============================>.] - ETA: 0s - loss: 0.2557 - root_mean_squared_error: 0.5057
Epoch 00001: val_loss improved from inf to 0.23474, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0001-0.2347.h5
641/641 [==============================] - 9s 13ms/step - loss: 0.2548 - root_mean_squared_error: 0.5047 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 2/100
640/641 [============================>.] - ETA: 0s - loss: 0.2531 - root_mean_squared_error: 0.5031
Epoch 00002: val_loss did not improve from 0.23474
641/641 [==============================] - 8s 12ms/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 3/100
636/641 [============================>.] - ETA: 0s - loss: 0.2505 - root_mean_squared_error: 0.5005
Epoch 00003: val_loss did not improve from 0.23474
641/641 [==============================] - 8s 12ms/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 4/100
637/641 [============================>.] - ETA: 0s - loss: 0.2536 - root_mean_squared_error: 0.5036
Epoch 00004: val_loss did not improve from 0.23474
641/641 [==============================] - 8s 12ms/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 5/100
639/641 [============================>.] - ETA: 0s - loss: 0.2524 - root_mean_squared_error: 0.5024
Epoch 00005: val_loss improved from 0.23474 to 0.23471, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0005-0.2347.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 6/100
640/641 [============================>.] - ETA: 0s - loss: 0.2528 - root_mean_squared_error: 0.5028
Epoch 00006: val_loss improved from 0.23471 to 0.23468, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0006-0.2347.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2526 - root_mean_squared_error: 0.5026 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4844
Epoch 7/100
638/641 [============================>.] - ETA: 0s - loss: 0.2516 - root_mean_squared_error: 0.5016
Epoch 00007: val_loss improved from 0.23468 to 0.23409, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0007-0.2341.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2518 - root_mean_squared_error: 0.5018 - val_loss: 0.2341 - val_root_mean_squared_error: 0.4838
Epoch 8/100
641/641 [==============================] - ETA: 0s - loss: 0.2504 - root_mean_squared_error: 0.5004
Epoch 00008: val_loss improved from 0.23409 to 0.23267, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0008-0.2327.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2504 - root_mean_squared_error: 0.5004 - val_loss: 0.2327 - val_root_mean_squared_error: 0.4824
Epoch 9/100
640/641 [============================>.] - ETA: 0s - loss: 0.2454 - root_mean_squared_error: 0.4954
Epoch 00009: val_loss improved from 0.23267 to 0.22157, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0009-0.2216.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2452 - root_mean_squared_error: 0.4951 - val_loss: 0.2216 - val_root_mean_squared_error: 0.4707
Epoch 10/100
639/641 [============================>.] - ETA: 0s - loss: 0.2307 - root_mean_squared_error: 0.4803
Epoch 00010: val_loss improved from 0.22157 to 0.20416, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0010-0.2042.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2302 - root_mean_squared_error: 0.4798 - val_loss: 0.2042 - val_root_mean_squared_error: 0.4518
Epoch 11/100
636/641 [============================>.] - ETA: 0s - loss: 0.2148 - root_mean_squared_error: 0.4635
Epoch 00011: val_loss improved from 0.20416 to 0.18700, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0011-0.1870.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.2147 - root_mean_squared_error: 0.4634 - val_loss: 0.1870 - val_root_mean_squared_error: 0.4324
Epoch 12/100
640/641 [============================>.] - ETA: 0s - loss: 0.1939 - root_mean_squared_error: 0.4404
Epoch 00012: val_loss improved from 0.18700 to 0.17739, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0012-0.1774.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1939 - root_mean_squared_error: 0.4403 - val_loss: 0.1774 - val_root_mean_squared_error: 0.4212
Epoch 13/100
641/641 [==============================] - ETA: 0s - loss: 0.1851 - root_mean_squared_error: 0.4302
Epoch 00013: val_loss improved from 0.17739 to 0.17037, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0013-0.1704.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1851 - root_mean_squared_error: 0.4302 - val_loss: 0.1704 - val_root_mean_squared_error: 0.4128
Epoch 14/100
638/641 [============================>.] - ETA: 0s - loss: 0.1747 - root_mean_squared_error: 0.4180
Epoch 00014: val_loss improved from 0.17037 to 0.15766, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0014-0.1577.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1745 - root_mean_squared_error: 0.4177 - val_loss: 0.1577 - val_root_mean_squared_error: 0.3971
Epoch 15/100
638/641 [============================>.] - ETA: 0s - loss: 0.1619 - root_mean_squared_error: 0.4024
Epoch 00015: val_loss improved from 0.15766 to 0.15265, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0015-0.1527.h5
641/641 [==============================] - 8s 13ms/step - loss: 0.1623 - root_mean_squared_error: 0.4029 - val_loss: 0.1527 - val_root_mean_squared_error: 0.3907
Epoch 16/100
639/641 [============================>.] - ETA: 0s - loss: 0.1521 - root_mean_squared_error: 0.3900
Epoch 00016: val_loss improved from 0.15265 to 0.14152, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0016-0.1415.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1519 - root_mean_squared_error: 0.3898 - val_loss: 0.1415 - val_root_mean_squared_error: 0.3762
Epoch 17/100
637/641 [============================>.] - ETA: 0s - loss: 0.1425 - root_mean_squared_error: 0.3775
Epoch 00017: val_loss improved from 0.14152 to 0.13354, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0017-0.1335.h5
641/641 [==============================] - 8s 13ms/step - loss: 0.1419 - root_mean_squared_error: 0.3767 - val_loss: 0.1335 - val_root_mean_squared_error: 0.3654
Epoch 18/100
637/641 [============================>.] - ETA: 0s - loss: 0.1365 - root_mean_squared_error: 0.3695
Epoch 00018: val_loss improved from 0.13354 to 0.12435, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0018-0.1243.h5
641/641 [==============================] - 8s 13ms/step - loss: 0.1362 - root_mean_squared_error: 0.3691 - val_loss: 0.1243 - val_root_mean_squared_error: 0.3526
Epoch 19/100
639/641 [============================>.] - ETA: 0s - loss: 0.1246 - root_mean_squared_error: 0.3530
Epoch 00019: val_loss improved from 0.12435 to 0.11462, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0019-0.1146.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1244 - root_mean_squared_error: 0.3527 - val_loss: 0.1146 - val_root_mean_squared_error: 0.3386
Epoch 20/100
638/641 [============================>.] - ETA: 0s - loss: 0.1228 - root_mean_squared_error: 0.3504
Epoch 00020: val_loss improved from 0.11462 to 0.10926, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0020-0.1093.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1224 - root_mean_squared_error: 0.3498 - val_loss: 0.1093 - val_root_mean_squared_error: 0.3305
Epoch 21/100
638/641 [============================>.] - ETA: 0s - loss: 0.1091 - root_mean_squared_error: 0.3303
Epoch 00021: val_loss improved from 0.10926 to 0.10532, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0021-0.1053.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1089 - root_mean_squared_error: 0.3300 - val_loss: 0.1053 - val_root_mean_squared_error: 0.3245
Epoch 22/100
639/641 [============================>.] - ETA: 0s - loss: 0.1071 - root_mean_squared_error: 0.3272
Epoch 00022: val_loss improved from 0.10532 to 0.10483, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0022-0.1048.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.1073 - root_mean_squared_error: 0.3276 - val_loss: 0.1048 - val_root_mean_squared_error: 0.3238
Epoch 23/100
639/641 [============================>.] - ETA: 0s - loss: 0.0982 - root_mean_squared_error: 0.3134
Epoch 00023: val_loss did not improve from 0.10483
641/641 [==============================] - 8s 12ms/step - loss: 0.0981 - root_mean_squared_error: 0.3133 - val_loss: 0.1128 - val_root_mean_squared_error: 0.3359
Epoch 24/100
638/641 [============================>.] - ETA: 0s - loss: 0.0928 - root_mean_squared_error: 0.3046
Epoch 00024: val_loss improved from 0.10483 to 0.09814, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0024-0.0981.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0928 - root_mean_squared_error: 0.3046 - val_loss: 0.0981 - val_root_mean_squared_error: 0.3133
Epoch 25/100
640/641 [============================>.] - ETA: 0s - loss: 0.0960 - root_mean_squared_error: 0.3098
Epoch 00025: val_loss improved from 0.09814 to 0.09731, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0025-0.0973.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0959 - root_mean_squared_error: 0.3097 - val_loss: 0.0973 - val_root_mean_squared_error: 0.3119
Epoch 26/100
640/641 [============================>.] - ETA: 0s - loss: 0.0899 - root_mean_squared_error: 0.2998
Epoch 00026: val_loss improved from 0.09731 to 0.08995, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0026-0.0899.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0898 - root_mean_squared_error: 0.2997 - val_loss: 0.0899 - val_root_mean_squared_error: 0.2999
Epoch 27/100
640/641 [============================>.] - ETA: 0s - loss: 0.0840 - root_mean_squared_error: 0.2898
Epoch 00027: val_loss did not improve from 0.08995
641/641 [==============================] - 8s 12ms/step - loss: 0.0843 - root_mean_squared_error: 0.2903 - val_loss: 0.0910 - val_root_mean_squared_error: 0.3016
Epoch 28/100
636/641 [============================>.] - ETA: 0s - loss: 0.0845 - root_mean_squared_error: 0.2907
Epoch 00028: val_loss improved from 0.08995 to 0.07935, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0028-0.0794.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0845 - root_mean_squared_error: 0.2907 - val_loss: 0.0794 - val_root_mean_squared_error: 0.2817
Epoch 29/100
638/641 [============================>.] - ETA: 0s - loss: 0.0809 - root_mean_squared_error: 0.2845
Epoch 00029: val_loss did not improve from 0.07935
641/641 [==============================] - 8s 12ms/step - loss: 0.0809 - root_mean_squared_error: 0.2844 - val_loss: 0.0886 - val_root_mean_squared_error: 0.2977
Epoch 30/100
639/641 [============================>.] - ETA: 0s - loss: 0.0772 - root_mean_squared_error: 0.2779
Epoch 00030: val_loss did not improve from 0.07935
641/641 [==============================] - 8s 12ms/step - loss: 0.0774 - root_mean_squared_error: 0.2781 - val_loss: 0.0810 - val_root_mean_squared_error: 0.2847
Epoch 31/100
641/641 [==============================] - ETA: 0s - loss: 0.0754 - root_mean_squared_error: 0.2746
Epoch 00031: val_loss improved from 0.07935 to 0.07370, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0031-0.0737.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0754 - root_mean_squared_error: 0.2746 - val_loss: 0.0737 - val_root_mean_squared_error: 0.2715
Epoch 32/100
639/641 [============================>.] - ETA: 0s - loss: 0.0749 - root_mean_squared_error: 0.2737
Epoch 00032: val_loss did not improve from 0.07370
641/641 [==============================] - 8s 12ms/step - loss: 0.0748 - root_mean_squared_error: 0.2735 - val_loss: 0.0888 - val_root_mean_squared_error: 0.2980
Epoch 33/100
640/641 [============================>.] - ETA: 0s - loss: 0.0692 - root_mean_squared_error: 0.2631
Epoch 00033: val_loss did not improve from 0.07370
641/641 [==============================] - 8s 12ms/step - loss: 0.0692 - root_mean_squared_error: 0.2631 - val_loss: 0.0746 - val_root_mean_squared_error: 0.2730
Epoch 34/100
639/641 [============================>.] - ETA: 0s - loss: 0.0700 - root_mean_squared_error: 0.2646
Epoch 00034: val_loss did not improve from 0.07370
641/641 [==============================] - 8s 12ms/step - loss: 0.0699 - root_mean_squared_error: 0.2643 - val_loss: 0.0749 - val_root_mean_squared_error: 0.2736
Epoch 35/100
637/641 [============================>.] - ETA: 0s - loss: 0.0670 - root_mean_squared_error: 0.2588
Epoch 00035: val_loss improved from 0.07370 to 0.06660, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0035-0.0666.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0676 - root_mean_squared_error: 0.2601 - val_loss: 0.0666 - val_root_mean_squared_error: 0.2581
Epoch 36/100
638/641 [============================>.] - ETA: 0s - loss: 0.0691 - root_mean_squared_error: 0.2628
Epoch 00036: val_loss did not improve from 0.06660
641/641 [==============================] - 8s 12ms/step - loss: 0.0690 - root_mean_squared_error: 0.2628 - val_loss: 0.0733 - val_root_mean_squared_error: 0.2708
Epoch 37/100
640/641 [============================>.] - ETA: 0s - loss: 0.0639 - root_mean_squared_error: 0.2528
Epoch 00037: val_loss did not improve from 0.06660
641/641 [==============================] - 8s 12ms/step - loss: 0.0639 - root_mean_squared_error: 0.2527 - val_loss: 0.0716 - val_root_mean_squared_error: 0.2676
Epoch 38/100
638/641 [============================>.] - ETA: 0s - loss: 0.0639 - root_mean_squared_error: 0.2527
Epoch 00038: val_loss did not improve from 0.06660
641/641 [==============================] - 8s 12ms/step - loss: 0.0638 - root_mean_squared_error: 0.2525 - val_loss: 0.0719 - val_root_mean_squared_error: 0.2681
Epoch 39/100
640/641 [============================>.] - ETA: 0s - loss: 0.0654 - root_mean_squared_error: 0.2557
Epoch 00039: val_loss did not improve from 0.06660
641/641 [==============================] - 8s 12ms/step - loss: 0.0653 - root_mean_squared_error: 0.2556 - val_loss: 0.0762 - val_root_mean_squared_error: 0.2761
Epoch 40/100
639/641 [============================>.] - ETA: 0s - loss: 0.0635 - root_mean_squared_error: 0.2519
Epoch 00040: val_loss did not improve from 0.06660
641/641 [==============================] - 8s 12ms/step - loss: 0.0634 - root_mean_squared_error: 0.2518 - val_loss: 0.0667 - val_root_mean_squared_error: 0.2583
Epoch 41/100
640/641 [============================>.] - ETA: 0s - loss: 0.0569 - root_mean_squared_error: 0.2385
Epoch 00041: val_loss improved from 0.06660 to 0.06556, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0041-0.0656.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0568 - root_mean_squared_error: 0.2384 - val_loss: 0.0656 - val_root_mean_squared_error: 0.2560
Epoch 42/100
640/641 [============================>.] - ETA: 0s - loss: 0.0578 - root_mean_squared_error: 0.2405
Epoch 00042: val_loss did not improve from 0.06556
641/641 [==============================] - 8s 12ms/step - loss: 0.0578 - root_mean_squared_error: 0.2404 - val_loss: 0.0688 - val_root_mean_squared_error: 0.2623
Epoch 43/100
638/641 [============================>.] - ETA: 0s - loss: 0.0561 - root_mean_squared_error: 0.2369
Epoch 00043: val_loss did not improve from 0.06556
641/641 [==============================] - 8s 12ms/step - loss: 0.0560 - root_mean_squared_error: 0.2366 - val_loss: 0.0728 - val_root_mean_squared_error: 0.2699
Epoch 44/100
640/641 [============================>.] - ETA: 0s - loss: 0.0580 - root_mean_squared_error: 0.2408
Epoch 00044: val_loss improved from 0.06556 to 0.06468, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0044-0.0647.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0580 - root_mean_squared_error: 0.2408 - val_loss: 0.0647 - val_root_mean_squared_error: 0.2543
Epoch 45/100
638/641 [============================>.] - ETA: 0s - loss: 0.0551 - root_mean_squared_error: 0.2348
Epoch 00045: val_loss did not improve from 0.06468
641/641 [==============================] - 8s 12ms/step - loss: 0.0553 - root_mean_squared_error: 0.2351 - val_loss: 0.0697 - val_root_mean_squared_error: 0.2641
Epoch 46/100
638/641 [============================>.] - ETA: 0s - loss: 0.0596 - root_mean_squared_error: 0.2442
Epoch 00046: val_loss did not improve from 0.06468
641/641 [==============================] - 8s 12ms/step - loss: 0.0598 - root_mean_squared_error: 0.2445 - val_loss: 0.0798 - val_root_mean_squared_error: 0.2825
Epoch 47/100
640/641 [============================>.] - ETA: 0s - loss: 0.0564 - root_mean_squared_error: 0.2375
Epoch 00047: val_loss did not improve from 0.06468
641/641 [==============================] - 8s 12ms/step - loss: 0.0565 - root_mean_squared_error: 0.2377 - val_loss: 0.0650 - val_root_mean_squared_error: 0.2550
Epoch 48/100
640/641 [============================>.] - ETA: 0s - loss: 0.0505 - root_mean_squared_error: 0.2247
Epoch 00048: val_loss did not improve from 0.06468
641/641 [==============================] - 8s 12ms/step - loss: 0.0506 - root_mean_squared_error: 0.2249 - val_loss: 0.0749 - val_root_mean_squared_error: 0.2736
Epoch 49/100
640/641 [============================>.] - ETA: 0s - loss: 0.0540 - root_mean_squared_error: 0.2323
Epoch 00049: val_loss improved from 0.06468 to 0.06420, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0049-0.0642.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0540 - root_mean_squared_error: 0.2323 - val_loss: 0.0642 - val_root_mean_squared_error: 0.2534
Epoch 50/100
637/641 [============================>.] - ETA: 0s - loss: 0.0538 - root_mean_squared_error: 0.2320
Epoch 00050: val_loss improved from 0.06420 to 0.06117, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0050-0.0612.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0539 - root_mean_squared_error: 0.2322 - val_loss: 0.0612 - val_root_mean_squared_error: 0.2473
Epoch 51/100
639/641 [============================>.] - ETA: 0s - loss: 0.0547 - root_mean_squared_error: 0.2338
Epoch 00051: val_loss improved from 0.06117 to 0.06054, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0051-0.0605.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0550 - root_mean_squared_error: 0.2345 - val_loss: 0.0605 - val_root_mean_squared_error: 0.2461
Epoch 52/100
639/641 [============================>.] - ETA: 0s - loss: 0.0524 - root_mean_squared_error: 0.2289
Epoch 00052: val_loss did not improve from 0.06054
641/641 [==============================] - 8s 12ms/step - loss: 0.0524 - root_mean_squared_error: 0.2290 - val_loss: 0.0666 - val_root_mean_squared_error: 0.2580
Epoch 53/100
640/641 [============================>.] - ETA: 0s - loss: 0.0497 - root_mean_squared_error: 0.2230
Epoch 00053: val_loss improved from 0.06054 to 0.06053, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0053-0.0605.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0497 - root_mean_squared_error: 0.2230 - val_loss: 0.0605 - val_root_mean_squared_error: 0.2460
Epoch 54/100
637/641 [============================>.] - ETA: 0s - loss: 0.0497 - root_mean_squared_error: 0.2230
Epoch 00054: val_loss did not improve from 0.06053
641/641 [==============================] - 8s 12ms/step - loss: 0.0497 - root_mean_squared_error: 0.2229 - val_loss: 0.0654 - val_root_mean_squared_error: 0.2558
Epoch 55/100
638/641 [============================>.] - ETA: 0s - loss: 0.0518 - root_mean_squared_error: 0.2276
Epoch 00055: val_loss did not improve from 0.06053
641/641 [==============================] - 8s 12ms/step - loss: 0.0520 - root_mean_squared_error: 0.2280 - val_loss: 0.0657 - val_root_mean_squared_error: 0.2564
Epoch 56/100
639/641 [============================>.] - ETA: 0s - loss: 0.0477 - root_mean_squared_error: 0.2184
Epoch 00056: val_loss improved from 0.06053 to 0.05946, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0056-0.0595.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0478 - root_mean_squared_error: 0.2186 - val_loss: 0.0595 - val_root_mean_squared_error: 0.2439
Epoch 57/100
638/641 [============================>.] - ETA: 0s - loss: 0.0482 - root_mean_squared_error: 0.2196
Epoch 00057: val_loss did not improve from 0.05946
641/641 [==============================] - 8s 12ms/step - loss: 0.0481 - root_mean_squared_error: 0.2193 - val_loss: 0.0632 - val_root_mean_squared_error: 0.2514
Epoch 58/100
638/641 [============================>.] - ETA: 0s - loss: 0.0457 - root_mean_squared_error: 0.2138
Epoch 00058: val_loss improved from 0.05946 to 0.05750, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0058-0.0575.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0456 - root_mean_squared_error: 0.2136 - val_loss: 0.0575 - val_root_mean_squared_error: 0.2398
Epoch 59/100
640/641 [============================>.] - ETA: 0s - loss: 0.0516 - root_mean_squared_error: 0.2271
Epoch 00059: val_loss did not improve from 0.05750
641/641 [==============================] - 8s 12ms/step - loss: 0.0516 - root_mean_squared_error: 0.2271 - val_loss: 0.0650 - val_root_mean_squared_error: 0.2550
Epoch 60/100
641/641 [==============================] - ETA: 0s - loss: 0.0482 - root_mean_squared_error: 0.2196
Epoch 00060: val_loss improved from 0.05750 to 0.05612, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0060-0.0561.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0482 - root_mean_squared_error: 0.2196 - val_loss: 0.0561 - val_root_mean_squared_error: 0.2369
Epoch 61/100
641/641 [==============================] - ETA: 0s - loss: 0.0450 - root_mean_squared_error: 0.2121
Epoch 00061: val_loss did not improve from 0.05612
641/641 [==============================] - 8s 12ms/step - loss: 0.0450 - root_mean_squared_error: 0.2121 - val_loss: 0.0615 - val_root_mean_squared_error: 0.2480
Epoch 62/100
641/641 [==============================] - ETA: 0s - loss: 0.0423 - root_mean_squared_error: 0.2056
Epoch 00062: val_loss did not improve from 0.05612
641/641 [==============================] - 8s 12ms/step - loss: 0.0423 - root_mean_squared_error: 0.2056 - val_loss: 0.0644 - val_root_mean_squared_error: 0.2538
Epoch 63/100
640/641 [============================>.] - ETA: 0s - loss: 0.0454 - root_mean_squared_error: 0.2132
Epoch 00063: val_loss did not improve from 0.05612
641/641 [==============================] - 8s 12ms/step - loss: 0.0454 - root_mean_squared_error: 0.2131 - val_loss: 0.0601 - val_root_mean_squared_error: 0.2452
Epoch 64/100
640/641 [============================>.] - ETA: 0s - loss: 0.0446 - root_mean_squared_error: 0.2112
Epoch 00064: val_loss improved from 0.05612 to 0.05331, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0064-0.0533.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0446 - root_mean_squared_error: 0.2111 - val_loss: 0.0533 - val_root_mean_squared_error: 0.2309
Epoch 65/100
640/641 [============================>.] - ETA: 0s - loss: 0.0434 - root_mean_squared_error: 0.2084
Epoch 00065: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0434 - root_mean_squared_error: 0.2083 - val_loss: 0.0687 - val_root_mean_squared_error: 0.2621
Epoch 66/100
640/641 [============================>.] - ETA: 0s - loss: 0.0423 - root_mean_squared_error: 0.2057
Epoch 00066: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0423 - root_mean_squared_error: 0.2056 - val_loss: 0.0579 - val_root_mean_squared_error: 0.2407
Epoch 67/100
640/641 [============================>.] - ETA: 0s - loss: 0.0413 - root_mean_squared_error: 0.2033
Epoch 00067: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0413 - root_mean_squared_error: 0.2033 - val_loss: 0.0553 - val_root_mean_squared_error: 0.2352
Epoch 68/100
638/641 [============================>.] - ETA: 0s - loss: 0.0417 - root_mean_squared_error: 0.2042
Epoch 00068: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0417 - root_mean_squared_error: 0.2042 - val_loss: 0.0548 - val_root_mean_squared_error: 0.2340
Epoch 69/100
641/641 [==============================] - ETA: 0s - loss: 0.0442 - root_mean_squared_error: 0.2102
Epoch 00069: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0442 - root_mean_squared_error: 0.2102 - val_loss: 0.0640 - val_root_mean_squared_error: 0.2529
Epoch 70/100
641/641 [==============================] - ETA: 0s - loss: 0.0432 - root_mean_squared_error: 0.2077
Epoch 00070: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0432 - root_mean_squared_error: 0.2077 - val_loss: 0.0573 - val_root_mean_squared_error: 0.2393
Epoch 71/100
636/641 [============================>.] - ETA: 0s - loss: 0.0401 - root_mean_squared_error: 0.2003
Epoch 00071: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0409 - root_mean_squared_error: 0.2022 - val_loss: 0.0612 - val_root_mean_squared_error: 0.2474
Epoch 72/100
636/641 [============================>.] - ETA: 0s - loss: 0.0401 - root_mean_squared_error: 0.2004
Epoch 00072: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0403 - root_mean_squared_error: 0.2007 - val_loss: 0.0539 - val_root_mean_squared_error: 0.2322
Epoch 73/100
641/641 [==============================] - ETA: 0s - loss: 0.0400 - root_mean_squared_error: 0.2000
Epoch 00073: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0400 - root_mean_squared_error: 0.2000 - val_loss: 0.0571 - val_root_mean_squared_error: 0.2389
Epoch 74/100
640/641 [============================>.] - ETA: 0s - loss: 0.0402 - root_mean_squared_error: 0.2005
Epoch 00074: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0402 - root_mean_squared_error: 0.2005 - val_loss: 0.0660 - val_root_mean_squared_error: 0.2569
Epoch 75/100
639/641 [============================>.] - ETA: 0s - loss: 0.0398 - root_mean_squared_error: 0.1995
Epoch 00075: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0397 - root_mean_squared_error: 0.1993 - val_loss: 0.0579 - val_root_mean_squared_error: 0.2405
Epoch 76/100
641/641 [==============================] - ETA: 0s - loss: 0.0387 - root_mean_squared_error: 0.1966
Epoch 00076: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0387 - root_mean_squared_error: 0.1966 - val_loss: 0.0593 - val_root_mean_squared_error: 0.2435
Epoch 77/100
636/641 [============================>.] - ETA: 0s - loss: 0.0350 - root_mean_squared_error: 0.1871
Epoch 00077: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0350 - root_mean_squared_error: 0.1870 - val_loss: 0.0540 - val_root_mean_squared_error: 0.2323
Epoch 78/100
639/641 [============================>.] - ETA: 0s - loss: 0.0385 - root_mean_squared_error: 0.1963
Epoch 00078: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0384 - root_mean_squared_error: 0.1961 - val_loss: 0.0568 - val_root_mean_squared_error: 0.2383
Epoch 79/100
638/641 [============================>.] - ETA: 0s - loss: 0.0431 - root_mean_squared_error: 0.2076
Epoch 00079: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0431 - root_mean_squared_error: 0.2075 - val_loss: 0.0575 - val_root_mean_squared_error: 0.2399
Epoch 80/100
640/641 [============================>.] - ETA: 0s - loss: 0.0400 - root_mean_squared_error: 0.1999
Epoch 00080: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0399 - root_mean_squared_error: 0.1999 - val_loss: 0.0551 - val_root_mean_squared_error: 0.2346
Epoch 81/100
641/641 [==============================] - ETA: 0s - loss: 0.0386 - root_mean_squared_error: 0.1964
Epoch 00081: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0386 - root_mean_squared_error: 0.1964 - val_loss: 0.0581 - val_root_mean_squared_error: 0.2410
Epoch 82/100
641/641 [==============================] - ETA: 0s - loss: 0.0377 - root_mean_squared_error: 0.1940
Epoch 00082: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0377 - root_mean_squared_error: 0.1940 - val_loss: 0.0563 - val_root_mean_squared_error: 0.2374
Epoch 83/100
641/641 [==============================] - ETA: 0s - loss: 0.0354 - root_mean_squared_error: 0.1881
Epoch 00083: val_loss did not improve from 0.05331
641/641 [==============================] - 8s 12ms/step - loss: 0.0354 - root_mean_squared_error: 0.1881 - val_loss: 0.0560 - val_root_mean_squared_error: 0.2367
Epoch 84/100
637/641 [============================>.] - ETA: 0s - loss: 0.0391 - root_mean_squared_error: 0.1977
Epoch 00084: val_loss improved from 0.05331 to 0.05100, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0084-0.0510.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0405 - root_mean_squared_error: 0.2011 - val_loss: 0.0510 - val_root_mean_squared_error: 0.2258
Epoch 85/100
640/641 [============================>.] - ETA: 0s - loss: 0.0371 - root_mean_squared_error: 0.1927
Epoch 00085: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0371 - root_mean_squared_error: 0.1926 - val_loss: 0.0533 - val_root_mean_squared_error: 0.2309
Epoch 86/100
637/641 [============================>.] - ETA: 0s - loss: 0.0381 - root_mean_squared_error: 0.1953
Epoch 00086: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0381 - root_mean_squared_error: 0.1952 - val_loss: 0.0514 - val_root_mean_squared_error: 0.2267
Epoch 87/100
641/641 [==============================] - ETA: 0s - loss: 0.0359 - root_mean_squared_error: 0.1893
Epoch 00087: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0359 - root_mean_squared_error: 0.1893 - val_loss: 0.0599 - val_root_mean_squared_error: 0.2447
Epoch 88/100
640/641 [============================>.] - ETA: 0s - loss: 0.0350 - root_mean_squared_error: 0.1870
Epoch 00088: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0349 - root_mean_squared_error: 0.1869 - val_loss: 0.0551 - val_root_mean_squared_error: 0.2347
Epoch 89/100
639/641 [============================>.] - ETA: 0s - loss: 0.0358 - root_mean_squared_error: 0.1893
Epoch 00089: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0358 - root_mean_squared_error: 0.1892 - val_loss: 0.0579 - val_root_mean_squared_error: 0.2407
Epoch 90/100
641/641 [==============================] - ETA: 0s - loss: 0.0333 - root_mean_squared_error: 0.1825
Epoch 00090: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0333 - root_mean_squared_error: 0.1825 - val_loss: 0.0565 - val_root_mean_squared_error: 0.2378
Epoch 91/100
636/641 [============================>.] - ETA: 0s - loss: 0.0378 - root_mean_squared_error: 0.1946
Epoch 00091: val_loss did not improve from 0.05100
641/641 [==============================] - 7s 12ms/step - loss: 0.0377 - root_mean_squared_error: 0.1941 - val_loss: 0.0560 - val_root_mean_squared_error: 0.2366
Epoch 92/100
637/641 [============================>.] - ETA: 0s - loss: 0.0347 - root_mean_squared_error: 0.1862
Epoch 00092: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0346 - root_mean_squared_error: 0.1859 - val_loss: 0.0552 - val_root_mean_squared_error: 0.2348
Epoch 93/100
640/641 [============================>.] - ETA: 0s - loss: 0.0326 - root_mean_squared_error: 0.1805
Epoch 00093: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0326 - root_mean_squared_error: 0.1805 - val_loss: 0.0619 - val_root_mean_squared_error: 0.2488
Epoch 94/100
637/641 [============================>.] - ETA: 0s - loss: 0.0364 - root_mean_squared_error: 0.1907
Epoch 00094: val_loss did not improve from 0.05100
641/641 [==============================] - 8s 12ms/step - loss: 0.0362 - root_mean_squared_error: 0.1904 - val_loss: 0.0516 - val_root_mean_squared_error: 0.2271
Epoch 95/100
640/641 [============================>.] - ETA: 0s - loss: 0.0337 - root_mean_squared_error: 0.1837
Epoch 00095: val_loss improved from 0.05100 to 0.04950, saving model to /content/drive/My Drive/epochs/model_1_camera1_lr:0.0001.0095-0.0495.h5
641/641 [==============================] - 8s 12ms/step - loss: 0.0338 - root_mean_squared_error: 0.1837 - val_loss: 0.0495 - val_root_mean_squared_error: 0.2225
Epoch 96/100
641/641 [==============================] - ETA: 0s - loss: 0.0318 - root_mean_squared_error: 0.1783
Epoch 00096: val_loss did not improve from 0.04950
641/641 [==============================] - 8s 12ms/step - loss: 0.0318 - root_mean_squared_error: 0.1783 - val_loss: 0.0533 - val_root_mean_squared_error: 0.2309
Epoch 97/100
641/641 [==============================] - ETA: 0s - loss: 0.0369 - root_mean_squared_error: 0.1920
Epoch 00097: val_loss did not improve from 0.04950
641/641 [==============================] - 8s 12ms/step - loss: 0.0369 - root_mean_squared_error: 0.1920 - val_loss: 0.0505 - val_root_mean_squared_error: 0.2248
Epoch 98/100
638/641 [============================>.] - ETA: 0s - loss: 0.0364 - root_mean_squared_error: 0.1907
Epoch 00098: val_loss did not improve from 0.04950
641/641 [==============================] - 8s 12ms/step - loss: 0.0363 - root_mean_squared_error: 0.1906 - val_loss: 0.0567 - val_root_mean_squared_error: 0.2382
Epoch 99/100
639/641 [============================>.] - ETA: 0s - loss: 0.0317 - root_mean_squared_error: 0.1781
Epoch 00099: val_loss did not improve from 0.04950
641/641 [==============================] - 8s 12ms/step - loss: 0.0317 - root_mean_squared_error: 0.1781 - val_loss: 0.0544 - val_root_mean_squared_error: 0.2332
Epoch 100/100
641/641 [==============================] - ETA: 0s - loss: 0.0331 - root_mean_squared_error: 0.1818
Epoch 00100: val_loss did not improve from 0.04950
641/641 [==============================] - 8s 12ms/step - loss: 0.0331 - root_mean_squared_error: 0.1818 - val_loss: 0.0560 - val_root_mean_squared_error: 0.2367
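The log above shows val_loss stalling for 10-20 epochs at a time between improvements. A sketch of how the same run could be made cheaper with early stopping and learning-rate reduction, reusing the `model`, `filepath` and data already defined above; the patience values are illustrative, not tuned:

from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau

callbacks_list = [
    # keep saving the best weights, exactly as above
    ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min'),
    # halve the learning rate once val_loss has stalled for 5 epochs
    ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=5, verbose=1),
    # stop after 15 stalled epochs and roll back to the best weights seen
    EarlyStopping(monitor='val_loss', patience=15, restore_best_weights=True, verbose=1),
]

history = model.fit(X_train, y_train,
                    batch_size=64,
                    validation_data=(X_test, y_test),
                    epochs=100,
                    verbose=1,
                    callbacks=callbacks_list)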
ticks = list(range(0, 101, 10))
labels = list(range(0, 101, 10))
labels[0] = 1_____no_output_____labels_____no_output_____ticks_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(20, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch for Camera1', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18);_____no_output_____print(history.history.keys())dict_keys(['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])
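`history.history` is already a dict of equal-length lists, so the column-by-column construction below is equivalent to handing the dict straight to pandas (a small shortcut, assuming the usual `pd` alias):

model_1_camera1 = pd.DataFrame(history.history)  # same four columns: loss, root_mean_squared_error, val_loss, val_root_mean_squared_error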
model_1_camera1 = pd.DataFrame({'loss': history.history['loss'],
'root_mean_squared_error': history.history['root_mean_squared_error'],
'val_loss': history.history['val_loss'],
'val_root_mean_squared_error': history.history['val_root_mean_squared_error']},
columns = ['loss', 'root_mean_squared_error', 'val_loss', 'val_root_mean_squared_error'])_____no_output_____model_1_camera1.to_csv('/content/drive/My Drive/datasets/model_1_camera1.csv', index=False)_____no_output_____train_loss = history.history['loss']
test_loss = history.history['val_loss']
# Set figure size.
plt.figure(figsize=(12, 8))
# Generate line plot of training, testing loss over epochs.
plt.plot(train_loss, label='Training Loss', color='#185fad')
plt.plot(test_loss, label='Testing Loss', color='orange')
# Set title
plt.title('Training and Testing Loss by Epoch', fontsize = 25)
plt.xlabel('Epoch', fontsize = 18)
plt.ylabel('Mean Squared Error', fontsize = 18)
plt.xticks(ticks, labels)
plt.legend(fontsize = 18);_____no_output_____model_2 = Sequential()
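# Model 2: the same three conv/pool/dropout blocks as before, but with a much larger dense head
# (4096 -> 2048 -> 1024 -> 512, each followed by 50% dropout) before the single output unit.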
model_2.add(Conv2D(16, (3, 3), input_shape=(80, 160, 1), activation='relu'))
model_2.add(MaxPooling2D(pool_size=(2, 2)))
model_2.add(Dropout(.25))
model_2.add(Conv2D(32, (3, 3), activation='relu'))
model_2.add(MaxPooling2D(pool_size=(2, 2)))
model_2.add(Dropout(.25))
model_2.add(Conv2D(64, (3, 3), activation='relu'))
model_2.add(MaxPooling2D(pool_size=(2, 2)))
model_2.add(Dropout(.25))
model_2.add(Flatten())
model_2.add(Dense(4096, activation='relu'))
model_2.add(Dropout(.5))
model_2.add(Dense(2048, activation='relu'))
model_2.add(Dropout(.5))
model_2.add(Dense(1024, activation='relu'))
model_2.add(Dropout(.5))
model_2.add(Dense(512, activation='relu'))
model_2.add(Dropout(.5))
model_2.add(Dense(1))
model_2.compile(loss='mse', optimizer='adam', metrics=[RootMeanSquaredError()])_____no_output_____from keras.callbacks import *
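# ModelCheckpoint fills the {epoch:03d} and {val_loss:.3f} placeholders in this filename at save time.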
filepath = "/content/drive/My Drive/epochs/model_2_shuffled.{epoch:03d}-{val_loss:.3f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')  # val_loss is minimised, so mode must be 'min'
callbacks_list = [checkpoint]_____no_output_____history = model_2.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=30,
verbose=1,
callbacks=callbacks_list)Train on 41003 samples, validate on 10251 samples
Epoch 1/30
41003/41003 [==============================] - 41s 1ms/step - loss: 0.3027 - root_mean_squared_error: 0.5502 - val_loss: 0.2351 - val_root_mean_squared_error: 0.4849
Epoch 00001: val_loss improved from -inf to 0.23513, saving model to /content/drive/My Drive/epochs/model_2_shuffled.001-0.235.h5
Epoch 2/30
41003/41003 [==============================] - 37s 896us/step - loss: 0.2532 - root_mean_squared_error: 0.5032 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00002: val_loss did not improve from 0.23513
Epoch 3/30
41003/41003 [==============================] - 37s 902us/step - loss: 0.2531 - root_mean_squared_error: 0.5031 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00003: val_loss did not improve from 0.23513
Epoch 4/30
41003/41003 [==============================] - 37s 911us/step - loss: 0.2531 - root_mean_squared_error: 0.5031 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00004: val_loss did not improve from 0.23513
Epoch 5/30
41003/41003 [==============================] - 36s 879us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00005: val_loss did not improve from 0.23513
Epoch 6/30
41003/41003 [==============================] - 36s 873us/step - loss: 0.2530 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00006: val_loss did not improve from 0.23513
Epoch 7/30
41003/41003 [==============================] - 36s 872us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00007: val_loss did not improve from 0.23513
Epoch 8/30
41003/41003 [==============================] - 36s 874us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2349 - val_root_mean_squared_error: 0.4847
Epoch 00008: val_loss did not improve from 0.23513
Epoch 9/30
41003/41003 [==============================] - 36s 873us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2353 - val_root_mean_squared_error: 0.4850
Epoch 00009: val_loss improved from 0.23513 to 0.23527, saving model to /content/drive/My Drive/epochs/model_2_shuffled.009-0.235.h5
Epoch 10/30
41003/41003 [==============================] - 36s 876us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00010: val_loss did not improve from 0.23527
Epoch 11/30
41003/41003 [==============================] - 36s 873us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00011: val_loss did not improve from 0.23527
Epoch 12/30
41003/41003 [==============================] - 36s 874us/step - loss: 0.2535 - root_mean_squared_error: 0.5035 - val_loss: 0.2351 - val_root_mean_squared_error: 0.4849
Epoch 00012: val_loss did not improve from 0.23527
Epoch 13/30
41003/41003 [==============================] - 36s 872us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2359 - val_root_mean_squared_error: 0.4857
Epoch 00013: val_loss improved from 0.23527 to 0.23593, saving model to /content/drive/My Drive/epochs/model_2_shuffled.013-0.236.h5
Epoch 14/30
41003/41003 [==============================] - 36s 880us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2349 - val_root_mean_squared_error: 0.4846
Epoch 00014: val_loss did not improve from 0.23593
Epoch 15/30
41003/41003 [==============================] - 36s 875us/step - loss: 0.2530 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00015: val_loss did not improve from 0.23593
Epoch 16/30
41003/41003 [==============================] - 36s 875us/step - loss: 0.2531 - root_mean_squared_error: 0.5031 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00016: val_loss did not improve from 0.23593
Epoch 17/30
41003/41003 [==============================] - 36s 872us/step - loss: 0.2530 - root_mean_squared_error: 0.5030 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00017: val_loss did not improve from 0.23593
Epoch 18/30
41003/41003 [==============================] - 36s 875us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2349 - val_root_mean_squared_error: 0.4846
Epoch 00018: val_loss did not improve from 0.23593
Epoch 19/30
41003/41003 [==============================] - 36s 877us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00019: val_loss did not improve from 0.23593
Epoch 20/30
41003/41003 [==============================] - 36s 871us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00020: val_loss did not improve from 0.23593
Epoch 21/30
41003/41003 [==============================] - 36s 874us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00021: val_loss did not improve from 0.23593
Epoch 22/30
41003/41003 [==============================] - 36s 871us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00022: val_loss did not improve from 0.23593
Epoch 23/30
41003/41003 [==============================] - 36s 868us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00023: val_loss did not improve from 0.23593
Epoch 24/30
41003/41003 [==============================] - 36s 868us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00024: val_loss did not improve from 0.23593
Epoch 25/30
41003/41003 [==============================] - 36s 877us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00025: val_loss did not improve from 0.23593
Epoch 26/30
41003/41003 [==============================] - 36s 871us/step - loss: 0.2529 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00026: val_loss did not improve from 0.23593
Epoch 27/30
41003/41003 [==============================] - 36s 867us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00027: val_loss did not improve from 0.23593
Epoch 28/30
41003/41003 [==============================] - 36s 876us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00028: val_loss did not improve from 0.23593
Epoch 29/30
41003/41003 [==============================] - 36s 883us/step - loss: 0.2529 - root_mean_squared_error: 0.5028 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00029: val_loss did not improve from 0.23593
Epoch 30/30
41003/41003 [==============================] - 36s 880us/step - loss: 0.2531 - root_mean_squared_error: 0.5031 - val_loss: 0.2349 - val_root_mean_squared_error: 0.4846
Epoch 00030: val_loss did not improve from 0.23593
model_3 = Sequential()
model_3.add(Conv2D(16, (3, 3), input_shape=(80, 160, 1), activation='relu'))
model_3.add(MaxPooling2D(pool_size=(2, 2)))
model_3.add(Dropout(.25))
model_3.add(Conv2D(32, (3, 3), activation='relu'))
model_3.add(MaxPooling2D(pool_size=(2, 2)))
model_3.add(Dropout(.25))
model_3.add(Conv2D(64, (3, 3), activation='relu'))
model_3.add(MaxPooling2D(pool_size=(2, 2)))
model_3.add(Dropout(.25))
model_3.add(Conv2D(128, (3, 3), activation='relu'))
model_3.add(MaxPooling2D(pool_size=(2, 2)))
model_3.add(Dropout(.25))
model_3.add(Flatten())
model_3.add(Dense(4096, activation='relu'))
model_3.add(Dropout(.5))
model_3.add(Dense(2048, activation='relu'))
model_3.add(Dropout(.5))
model_3.add(Dense(1024, activation='relu'))
model_3.add(Dropout(.5))
model_3.add(Dense(512, activation='relu'))
model_3.add(Dropout(.5))
model_3.add(Dense(1))
model_3.compile(loss='mse', optimizer='adam', metrics=[RootMeanSquaredError()])_____no_output_____from keras.callbacks import *
filepath = "/content/drive/My Drive/epochs/model_3_shuffled.{epoch:03d}-{val_loss:.3f}.h5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min')  # mode='min' so a lower val_loss counts as an improvement
callbacks_list = [checkpoint]_____no_output_____history = model_3.fit(X_train,
y_train,
batch_size=64,
validation_data=(X_test, y_test),
epochs=15,
verbose=1,
callbacks=callbacks_list)Train on 41003 samples, validate on 10251 samples
Epoch 1/15
41003/41003 [==============================] - 26s 633us/step - loss: 0.2560 - root_mean_squared_error: 0.5060 - val_loss: 0.2350 - val_root_mean_squared_error: 0.4848
Epoch 00001: val_loss improved from -inf to 0.23499, saving model to /content/drive/My Drive/epochs/model_3_shuffled.001-0.235.h5
Epoch 2/15
41003/41003 [==============================] - 25s 621us/step - loss: 0.2531 - root_mean_squared_error: 0.5031 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00002: val_loss did not improve from 0.23499
Epoch 3/15
41003/41003 [==============================] - 25s 618us/step - loss: 0.2530 - root_mean_squared_error: 0.5030 - val_loss: 0.2349 - val_root_mean_squared_error: 0.4846
Epoch 00003: val_loss did not improve from 0.23499
Epoch 4/15
41003/41003 [==============================] - 25s 621us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00004: val_loss did not improve from 0.23499
Epoch 5/15
41003/41003 [==============================] - 25s 622us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00005: val_loss did not improve from 0.23499
Epoch 6/15
41003/41003 [==============================] - 26s 628us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00006: val_loss did not improve from 0.23499
Epoch 7/15
41003/41003 [==============================] - 26s 623us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4846
Epoch 00007: val_loss did not improve from 0.23499
Epoch 8/15
41003/41003 [==============================] - 26s 623us/step - loss: 0.2529 - root_mean_squared_error: 0.5028 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00008: val_loss did not improve from 0.23499
Epoch 9/15
41003/41003 [==============================] - 26s 624us/step - loss: 0.2529 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00009: val_loss did not improve from 0.23499
Epoch 10/15
41003/41003 [==============================] - 26s 627us/step - loss: 0.2529 - root_mean_squared_error: 0.5029 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00010: val_loss did not improve from 0.23499
Epoch 11/15
41003/41003 [==============================] - 26s 624us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00011: val_loss did not improve from 0.23499
Epoch 12/15
41003/41003 [==============================] - 26s 623us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00012: val_loss did not improve from 0.23499
Epoch 13/15
41003/41003 [==============================] - 25s 621us/step - loss: 0.2528 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00013: val_loss did not improve from 0.23499
Epoch 14/15
41003/41003 [==============================] - 25s 621us/step - loss: 0.2529 - root_mean_squared_error: 0.5028 - val_loss: 0.2348 - val_root_mean_squared_error: 0.4845
Epoch 00014: val_loss did not improve from 0.23499
Epoch 15/15
41003/41003 [==============================] - 25s 620us/step - loss: 0.2529 - root_mean_squared_error: 0.5028 - val_loss: 0.2347 - val_root_mean_squared_error: 0.4845
Epoch 00015: val_loss did not improve from 0.23499
####### loading code
X = load('/content/drive/My Drive/camera1_train.npz')
X = X.f.arr_0
log1 = pd.read_csv('/content/drive/My Drive/log1_train.csv')
y = log1['steering_avg_radian']
y = y.to_numpy()
y = y.reshape(y.shape[0], 1)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=False)
####### end of loading code_____no_output_____!cp -r "/content/drive/My Drive/camera1_train.npz" ./camera1_train.npz_____no_output_____X = load('./camera1_train.npz')_____no_output_____X = X.f.arr_0_____no_output_____log1 = pd.read_csv('/content/drive/My Drive/log1_train.csv')
y = log1['steering_avg_radian']
y = y.to_numpy()
y = y.reshape(y.shape[0], 1)_____no_output_____from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=True)_____no_output_____savez_compressed('/content/drive/My Drive/X_train_shuffled', X_train)_____no_output_____savez_compressed('/content/drive/My Drive/X_test_shuffled', X_test)_____no_output_____savez_compressed('/content/drive/My Drive/y_train_shuffled', y_train)_____no_output_____savez_compressed('/content/drive/My Drive/y_test_shuffled', y_test)_____no_output_____!nvidia-smiMon Aug 3 15:26:27 2020
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 450.57 Driver Version: 418.67 CUDA Version: 10.1 |
|-------------------------------+----------------------+----------------------+
| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|===============================+======================+======================|
| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |
| N/A 72C P0 33W / 70W | 3167MiB / 15079MiB | 0% Default |
| | | ERR! |
+-------------------------------+----------------------+----------------------+
+-----------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=============================================================================|
| No running processes found |
+-----------------------------------------------------------------------------+
####### loading code from drive
X_train = load('/content/drive/My Drive/X_train.npz')
X_train = X_train.f.arr_0
X_test = load('/content/drive/My Drive/X_test.npz')
X_test = X_test.f.arr_0
y_train = load('/content/drive/My Drive/y_train.npz')
y_train = y_train.f.arr_0
y_test = load('/content/drive/My Drive/y_test.npz')
y_test = y_test.f.arr_0
####### end of loading code_____no_output_____!cp -r "/content/drive/My Drive/X_train.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/X_test.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/y_train.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/y_test.npz" ./y_test.npz_____no_output_____####### loading code from vm
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
####### end of loading code_____no_output_____# for shuffled data
!cp -r "/content/drive/My Drive/X_train_shuffled.npz" ./X_train.npz
!cp -r "/content/drive/My Drive/X_test_shuffled.npz" ./X_test.npz
!cp -r "/content/drive/My Drive/y_train_shuffled.npz" ./y_train.npz
!cp -r "/content/drive/My Drive/y_test_shuffled.npz" ./y_test.npz_____no_output_____# for shuffled data
####### loading code from vm
X_train = load('./X_train.npz')
X_train = X_train.f.arr_0
X_test = load('./X_test.npz')
X_test = X_test.f.arr_0
y_train = load('./y_train.npz')
y_train = y_train.f.arr_0
y_test = load('./y_test.npz')
y_test = y_test.f.arr_0
####### end of loading code_____no_output_____
</code>
| {
"repository": "leemjm92/dsi15_capstone_steering_wheel_prediction",
"path": ".ipynb_checkpoints/02_modeling_model_5-checkpoint.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 996331,
"hexsha": "d0df5b43eced3c6d9e0cfe90fc8ee37f6fcdb6b5",
"max_line_length": 69604,
"avg_line_length": 125.9424851473,
"alphanum_fraction": 0.8114953765
} |
# Notebook from CyberCRI/herocoli-metrics-redwire
Path: v1.52/Tests/1.3 Google form analysis visualizations.ipynb
# Google form analysis visualizations_____no_output_____## Table of Contents
['Google form analysis' functions checks](#funcchecks)
['Google form analysis' functions tinkering](#functinkering)_____no_output_____
<code>
%run "../Functions/1. Google form analysis.ipynb"_____no_output_____
</code>
## 'Google form analysis' functions checks
<a id=funcchecks />_____no_output_____## 'Google form analysis' functions tinkering
<a id=functinkering />_____no_output_____
<code>
binarizedAnswers = plotBasicStats(getSurveysOfBiologists(gform), 'non biologists', includeUndefined = True)_____no_output_____gform.loc[:, [localplayerguidkey, 'Temporality']].groupby('Temporality').count()_____no_output_____#sample = gform.copy()
samples = [
[gform.copy(), 'complete set'],
[gform[gform['Language'] == 'en'], 'English'],
[gform[gform['Language'] == 'fr'], 'French'],
[gform[gform['What is your gender?'] == 'Female'], 'female'],
[gform[gform['What is your gender?'] == 'Male'], 'male'],
[getSurveysOfUsersWhoAnsweredBoth(gform), 'answered both'],
[getSurveysOfUsersWhoAnsweredBoth(gform[gform['Language'] == 'en']), 'answered both, en'],
[getSurveysOfUsersWhoAnsweredBoth(gform[gform['Language'] == 'fr']), 'answered both, fr'],
[getSurveysOfUsersWhoAnsweredBoth(gform[gform['What is your gender?'] == 'Female']), 'answered both, female'],
[getSurveysOfUsersWhoAnsweredBoth(gform[gform['What is your gender?'] == 'Male']), 'answered both, male'],
]
_progress = FloatProgress(min=0, max=len(samples))
display(_progress)
includeAll = False
includeBefore = True
includeAfter = True
includeUndefined = False
includeProgress = True
includeRelativeProgress = False
for sample, title in samples:
## basic stats:
### mean score
### median score
### std
## sample can be: all, those who answered both before and after,
## those who played between date1 and date2, ...
#def plotBasicStats(sample, title, includeAll, includeBefore, includeAfter, includeUndefined, includeProgress, includeRelativeProgress):
stepsPerInclude = 2
includeCount = np.sum([includeAll, includeBefore, includeAfter, includeUndefined, includeProgress])
stepsCount = stepsPerInclude*includeCount + 3
#print("stepsPerInclude=" + str(stepsPerInclude))
#print("includeCount=" + str(includeCount))
#print("stepsCount=" + str(stepsCount))
__progress = FloatProgress(min=0, max=stepsCount)
display(__progress)
sampleBefore = sample[sample['Temporality'] == 'before']
sampleAfter = sample[sample['Temporality'] == 'after']
sampleUndefined = sample[sample['Temporality'] == 'undefined']
#uniqueBefore = sampleBefore[localplayerguidkey]
#uniqueAfter =
#uniqueUndefined =
scientificQuestions = correctAnswers.copy()
allQuestions = correctAnswers + demographicAnswers
categories = ['all', 'before', 'after', 'undefined', 'progress', 'rel. progress']
data = {}
sciBinarized = pd.DataFrame()
allBinarized = pd.DataFrame()
scoresAll = pd.DataFrame()
sciBinarizedBefore = pd.DataFrame()
allBinarizedBefore = pd.DataFrame()
scoresBefore = pd.DataFrame()
sciBinarizedAfter = pd.DataFrame()
allBinarizedAfter = pd.DataFrame()
scoresAfter = pd.DataFrame()
sciBinarizedUndefined = pd.DataFrame()
allBinarizedUndefined = pd.DataFrame()
scoresUndefined = pd.DataFrame()
scoresProgress = pd.DataFrame()
## basic stats:
### mean score
### median score
### std
if includeAll:
sciBinarized = getAllBinarized( _source = scientificQuestions, _form = sample)
__progress.value += 1
allBinarized = getAllBinarized( _source = allQuestions, _form = sample)
__progress.value += 1
scoresAll = pd.Series(np.dot(sciBinarized, np.ones(sciBinarized.shape[1])))
data[categories[0]] = createStatSet(scoresAll, sample[localplayerguidkey])
if includeBefore or includeProgress:
sciBinarizedBefore = getAllBinarized( _source = scientificQuestions, _form = sampleBefore)
__progress.value += 1
allBinarizedBefore = getAllBinarized( _source = allQuestions, _form = sampleBefore)
__progress.value += 1
scoresBefore = pd.Series(np.dot(sciBinarizedBefore, np.ones(sciBinarizedBefore.shape[1])))
temporaryStatSetBefore = createStatSet(scoresBefore, sampleBefore[localplayerguidkey])
if includeBefore:
data[categories[1]] = temporaryStatSetBefore
if includeAfter or includeProgress:
sciBinarizedAfter = getAllBinarized( _source = scientificQuestions, _form = sampleAfter)
__progress.value += 1
allBinarizedAfter = getAllBinarized( _source = allQuestions, _form = sampleAfter)
__progress.value += 1
scoresAfter = pd.Series(np.dot(sciBinarizedAfter, np.ones(sciBinarizedAfter.shape[1])))
temporaryStatSetAfter = createStatSet(scoresAfter, sampleAfter[localplayerguidkey])
if includeAfter:
data[categories[2]] = temporaryStatSetAfter
if includeUndefined:
sciBinarizedUndefined = getAllBinarized( _source = scientificQuestions, _form = sampleUndefined)
__progress.value += 1
allBinarizedUndefined = getAllBinarized( _source = allQuestions, _form = sampleUndefined)
__progress.value += 1
scoresUndefined = pd.Series(np.dot(sciBinarizedUndefined, np.ones(sciBinarizedUndefined.shape[1])))
data[categories[3]] = createStatSet(scoresUndefined, sampleUndefined[localplayerguidkey])
if includeProgress:
data[categories[4]] = {
'count' : min(temporaryStatSetAfter['count'], temporaryStatSetBefore['count']),
'unique' : min(temporaryStatSetAfter['unique'], temporaryStatSetBefore['unique']),
'median' : temporaryStatSetAfter['median']-temporaryStatSetBefore['median'],
'mean' : temporaryStatSetAfter['mean']-temporaryStatSetBefore['mean'],
'std' : temporaryStatSetAfter['std']-temporaryStatSetBefore['std'],
}
__progress.value += 2
result = pd.DataFrame(data)
__progress.value += 1
print(title)
print(result)
if (includeBefore and includeAfter) or includeProgress:
if (len(scoresBefore) > 2 and len(scoresAfter) > 2):
ttest = ttest_ind(scoresBefore, scoresAfter)
print("t test: statistic=" + repr(ttest.statistic) + " pvalue=" + repr(ttest.pvalue))
print()
## percentage correct
### percentage correct - max 5 columns
percentagePerQuestionAll = pd.DataFrame()
percentagePerQuestionBefore = pd.DataFrame()
percentagePerQuestionAfter = pd.DataFrame()
percentagePerQuestionUndefined = pd.DataFrame()
percentagePerQuestionProgress = pd.DataFrame()
tables = []
if includeAll:
percentagePerQuestionAll = getPercentagePerQuestion(allBinarized)
tables.append([percentagePerQuestionAll, categories[0]])
if includeBefore or includeProgress:
percentagePerQuestionBefore = getPercentagePerQuestion(allBinarizedBefore)
if includeBefore:
tables.append([percentagePerQuestionBefore, categories[1]])
if includeAfter or includeProgress:
percentagePerQuestionAfter = getPercentagePerQuestion(allBinarizedAfter)
if includeAfter:
tables.append([percentagePerQuestionAfter, categories[2]])
if includeUndefined:
percentagePerQuestionUndefined = getPercentagePerQuestion(allBinarizedUndefined)
tables.append([percentagePerQuestionUndefined, categories[3]])
if includeProgress or includeRelativeProgress:
percentagePerQuestionProgress = percentagePerQuestionAfter - percentagePerQuestionBefore
if includeProgress:
tables.append([percentagePerQuestionProgress, categories[4]])
if includeRelativeProgress:
# use temporaryStatSetAfter['count'], temporaryStatSetBefore['count']?
percentagePerQuestionProgress2 = percentagePerQuestionProgress.copy()
for index in range(0,len(percentagePerQuestionProgress.index)):
if (0 == percentagePerQuestionBefore.iloc[index,0]):
percentagePerQuestionProgress2.iloc[index,0] = 0
else:
percentagePerQuestionProgress2.iloc[index,0] = \
percentagePerQuestionProgress.iloc[index,0]/percentagePerQuestionBefore.iloc[index,0]
tables.append([percentagePerQuestionProgress2, categories[5]])
__progress.value += 1
graphTitle = '% correct: '
toConcat = []
for table,category in tables:
concat = (len(table.values) > 0)
for elt in table.iloc[:,0].values:
if np.isnan(elt):
concat = False
break
if(concat):
graphTitle = graphTitle + category + ' '
toConcat.append(table)
if (len(toConcat) > 0):
percentagePerQuestionConcatenated = pd.concat(
toConcat
, axis=1)
if(len(title) > 0):
graphTitle = graphTitle + ' - ' + title
_fig = plt.figure(figsize=(20,20))
_ax1 = plt.subplot(111)
_ax1.set_title(graphTitle)
sns.heatmap(percentagePerQuestionConcatenated.round().astype(int),ax=_ax1,cmap=plt.cm.jet,square=True,annot=True,fmt='d')
__progress.value += 1
### percentage cross correct
### percentage cross correct, conditionally
if(__progress.value != stepsCount):
print("__progress.value=" + str(__progress.value) + " != stepsCount=" + str(stepsCount))
_progress.value += 1
if(_progress.value != len(samples)):
print("__progress.value=" + str(__progress.value) + " != len(samples)=" + str(len(samples)))
# sciBinarized, sciBinarizedBefore, sciBinarizedAfter, sciBinarizedUndefined, \
# allBinarized, allBinarizedBefore, allBinarizedAfter, allBinarizedUndefined_____no_output_____ttest = ttest_ind(scoresBefore, scoresAfter)
type(scoresBefore), len(scoresBefore),\
type(scoresAfter), len(scoresAfter),\
ttest_____no_output_____type(tables)_____no_output_____sciBinarized = getAllBinarized( _source = scientificQuestions, _form = sample)
series = pd.Series(np.dot(sciBinarized, np.ones(sciBinarized.shape[1])))
#ids = pd.Series()
ids = sample[localplayerguidkey]
#def createStatSet(series, ids):
if(0 == len(ids)):
ids = series.index
result = {
'count' : len(ids),
'unique' : len(ids.unique()),
'median' : series.median(),
'mean' : series.mean(),
'std' : series.std()}
result_____no_output_____## percentage correct
### percentage correct - 3 columns
### percentage cross correct
### percentage cross correct, conditionally_____no_output_____#_binarized = allBinarized
#_binarized = allBinarizedUndefined
_binarized = allBinarizedBefore
#def getPercentagePerQuestion(_binarized):
totalPerQuestionDF = pd.DataFrame(data=np.dot(np.ones(_binarized.shape[0]), _binarized), index=_binarized.columns)
percentagePerQuestion = totalPerQuestionDF*100 / _binarized.shape[0]
percentagePerQuestion_____no_output_____#totalPerQuestion = np.dot(np.ones(allSciBinarized.shape[0]), allSciBinarized)
#totalPerQuestion.shape
totalPerQuestionSci = np.dot(np.ones(sciBinarized.shape[0]), sciBinarized)
totalPerQuestionAll = np.dot(np.ones(allBinarized.shape[0]), allBinarized)
percentagePerQuestionAll = getPercentagePerQuestion(allBinarized)
percentagePerQuestionBefore = getPercentagePerQuestion(allBinarizedBefore)
percentagePerQuestionAfter = getPercentagePerQuestion(allBinarizedAfter)
percentagePerQuestionUndefined = getPercentagePerQuestion(allBinarizedUndefined)
percentagePerQuestionConcatenated = pd.concat(
[
percentagePerQuestionAll,
percentagePerQuestionBefore,
percentagePerQuestionAfter,
percentagePerQuestionUndefined,
]
, axis=1)
_fig = plt.figure(figsize=(20,20))
_ax1 = plt.subplot(111)
_ax1.set_title('percentage correct per question: all, before, after, undefined')
sns.heatmap(percentagePerQuestionConcatenated.round().astype(int),ax=_ax1,cmap=plt.cm.jet,square=True,annot=True,fmt='d')_____no_output_____samples = [gform, gform[gform['Language'] == 'en'], gform[gform['Language'] == 'fr'],
getSurveysOfUsersWhoAnsweredBoth(gform),
getSurveysOfUsersWhoAnsweredBoth(gform[gform['Language'] == 'en']),
getSurveysOfUsersWhoAnsweredBoth(gform[gform['Language'] == 'fr'])]
for sample in samples:
sciBinarized, sciBinarizedBefore, sciBinarizedAfter, sciBinarizedUndefined, \
allBinarized, allBinarizedBefore, allBinarizedAfter, allBinarizedUndefined = plotBasicStats(sample)_____no_output_____
</code>
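An illustration of the 'progress' entry assembled in the loop above: it keeps the smaller of the two counts and takes the after-minus-before difference of the other statistics. The numbers below are made up for this sketch, not notebook data._____no_output_____

<code>
# hedged sketch: invented values, not computed from gform
before = {'count' : 30, 'unique' : 30, 'median' : 12.0, 'mean' : 11.5, 'std' : 3.25}
after = {'count' : 25, 'unique' : 25, 'median' : 15.0, 'mean' : 14.5, 'std' : 3.0}
progress = {
    'count' : min(after['count'], before['count']),
    'unique' : min(after['unique'], before['unique']),
    'median' : after['median'] - before['median'],
    'mean' : after['mean'] - before['mean'],
    'std' : after['std'] - before['std'],
}
progress # counts keep the smaller sample; the other entries are after minus before_____no_output_____
</code>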
### abandoned algorithms_____no_output_____
<code>
#totalPerQuestion = np.dot(np.ones(sciBinarized.shape[0]), sciBinarized)
#totalPerQuestion.shape
totalPerQuestionSci = np.dot(np.ones(sciBinarized.shape[0]), sciBinarized)
totalPerQuestionAll = np.dot(np.ones(allBinarized.shape[0]), allBinarized)
totalPerQuestionDFAll = pd.DataFrame(data=np.dot(np.ones(allBinarized.shape[0]), allBinarized), index=allBinarized.columns)
percentagePerQuestionAll = totalPerQuestionDFAll*100 / allBinarized.shape[0]
#totalPerQuestionDF
#percentagePerQuestion
#before
totalPerQuestionDFBefore = pd.DataFrame(
data=np.dot(np.ones(allBinarizedBefore.shape[0]), allBinarizedBefore), index=allBinarizedBefore.columns
)
percentagePerQuestionBefore = totalPerQuestionDFBefore*100 / allBinarizedBefore.shape[0]
#after
totalPerQuestionDFAfter = pd.DataFrame(
data=np.dot(np.ones(allBinarizedAfter.shape[0]), allBinarizedAfter), index=allBinarizedAfter.columns
)
percentagePerQuestionAfter = totalPerQuestionDFAfter*100 / allBinarizedAfter.shape[0]
_fig = plt.figure(figsize=(20,20))
ax1 = plt.subplot(131)
ax2 = plt.subplot(132)
ax3 = plt.subplot(133)
ax2.get_yaxis().set_visible(False)
ax3.get_yaxis().set_visible(False)
sns.heatmap(percentagePerQuestionAll.round().astype(int),ax=ax1,cmap=plt.cm.jet,square=True,annot=True,fmt='d', cbar=False)
sns.heatmap(percentagePerQuestionBefore.round().astype(int),ax=ax2,cmap=plt.cm.jet,square=True,annot=True,fmt='d', cbar=False)
sns.heatmap(percentagePerQuestionAfter.round().astype(int),ax=ax3,cmap=plt.cm.jet,square=True,annot=True,fmt='d', cbar=True)
ax1.set_title('percentage correct per question - all')
ax2.set_title('percentage correct per question - before')
ax3.set_title('percentage correct per question - after')
# Fine-tune figure; make subplots close to each other and hide x ticks for
# all but bottom plot.
_fig.tight_layout()_____no_output______fig = plt.figure(figsize=(20,20))
ax1 = plt.subplot(131)
ax2 = plt.subplot(132)
ax3 = plt.subplot(133)
ax2.get_yaxis().set_visible(False)
ax3.get_yaxis().set_visible(False)
sns.heatmap(percentagePerQuestionAll.round().astype(int),ax=ax1,cmap=plt.cm.jet,square=True,annot=True,fmt='d', cbar=False)
sns.heatmap(percentagePerQuestionBefore.round().astype(int),ax=ax2,cmap=plt.cm.jet,square=True,annot=True,fmt='d', cbar=False)
sns.heatmap(percentagePerQuestionAfter.round().astype(int),ax=ax3,cmap=plt.cm.jet,square=True,annot=True,fmt='d', cbar=True)
ax1.set_title('percentage correct per question - all')
ax2.set_title('percentage correct per question - before')
ax3.set_title('percentage correct per question - after')
# Fine-tune figure; make subplots close to each other and hide x ticks for
# all but bottom plot.
_fig.tight_layout()_____no_output_____percentagePerQuestionConcatenated = pd.concat([
percentagePerQuestionAll,
percentagePerQuestionBefore,
percentagePerQuestionAfter]
, axis=1)
_fig = plt.figure(figsize=(20,20))
_ax1 = plt.subplot(111)
_ax1.set_title('percentage correct per question: all, before, after')
sns.heatmap(percentagePerQuestionConcatenated.round().astype(int),ax=_ax1,cmap=plt.cm.jet,square=True,annot=True,fmt='d')_____no_output_____
</code>
### sample getters tinkering_____no_output_____
<code>
##### getRMAfter / Before tinkering
#def getRMAfters(sample):
afters = sample[sample['Temporality'] == 'after']
#def getRMBefores(sample):
befores = sample[sample['Temporality'] == 'before']_____no_output_____QPlayed1 = 'Have you ever played an older version of Hero.Coli before?'
QPlayed2 = 'Have you played the current version of Hero.Coli?'
QPlayed3 = 'Have you played the arcade cabinet version of Hero.Coli?'
QPlayed4 = 'Have you played the Android version of Hero.Coli?'_____no_output_____
</code>
#### set operators_____no_output_____
<code>
# equality tests
#(sample1.columns == sample2.columns).all()
#sample1.columns.duplicated().any() or sample2.columns.duplicated().any()
#pd.concat([sample1, sample2], axis=1).columns.duplicated().any()_____no_output_____
</code>
##### getUnionQuestionnaires tinkering_____no_output_____
<code>
sample1 = befores
sample2 = afters
#def getUnionQuestionnaires(sample1, sample2):
if (not (sample1.columns == sample2.columns).all()):
print("warning: parameter columns are not the same")
result = pd.concat([sample1, sample2]).drop_duplicates()_____no_output_____
</code>
##### getIntersectionQuestionnaires tinkering_____no_output_____
<code>
sample1 = befores[:15]
sample2 = befores[10:]
#def getIntersectionQuestionnaires(sample1, sample2):
if (not (sample1.columns == sample2.columns).all()):
print("warning: parameter columns are not the same")
result = pd.merge(sample1, sample2, how = 'inner').drop_duplicates()_____no_output_____
</code>
##### getIntersectionUsersSurveys tinkering_____no_output_____
<code>
sample1 = befores
sample2 = afters
# get sample1 and sample2 rows where users are common to sample1 and sample2
#def getIntersectionUsersSurveys(sample1, sample2):
result1 = sample1[sample1[localplayerguidkey].isin(sample2[localplayerguidkey])]
result2 = sample2[sample2[localplayerguidkey].isin(sample1[localplayerguidkey])]
result = getUnionQuestionnaires(result1,result2)_____no_output_____len(sample1), len(sample2), len(result)_____no_output_____
</code>
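A toy illustration of the three set operations tinkered with above (union of questionnaires, intersection of questionnaires, intersection of users). The two mini-frames are invented for this sketch and only reuse the localplayerguidkey column name._____no_output_____

<code>
# hedged toy example: invented mini-frames, not gform data
toy1 = pd.DataFrame({localplayerguidkey: ['a', 'b'], 'score': [1, 2]})
toy2 = pd.DataFrame({localplayerguidkey: ['b', 'c'], 'score': [5, 3]})
# union of questionnaires: concatenate rows, drop exact duplicates
toyUnion = pd.concat([toy1, toy2]).drop_duplicates()
# intersection of questionnaires: rows identical in both samples
toyIntersection = pd.merge(toy1, toy2, how = 'inner').drop_duplicates()
# intersection of users: rows of either sample whose user appears in both
toyCommon1 = toy1[toy1[localplayerguidkey].isin(toy2[localplayerguidkey])]
toyCommon2 = toy2[toy2[localplayerguidkey].isin(toy1[localplayerguidkey])]
toyUsersBoth = pd.concat([toyCommon1, toyCommon2]).drop_duplicates()
len(toyUnion), len(toyIntersection), len(toyUsersBoth) # (4, 0, 2)_____no_output_____
</code>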
##### getGFormBefores tinkering_____no_output_____
<code>
sample = gform
# returns users who declared that they have never played the game, whatever platform
# previousPlayPositives is defined in '../Static data/English localization.ipynb'
#def getGFormBefores(sample):
befores = sample[
~sample[QPlayed1].isin(previousPlayPositives)
& ~sample[QPlayed2].isin(previousPlayPositives)
& ~sample[QPlayed3].isin(previousPlayPositives)
& ~sample[QPlayed4].isin(previousPlayPositives)
]
len(befores)_____no_output_____
</code>
##### getGFormAfters tinkering_____no_output_____
<code>
sample = gform
# returns users who declared that they have already played the game, whatever platform
# previousPlayPositives is defined in '../Static data/English localization.ipynb'
#def getGFormAfters(sample):
afters = sample[
sample[QPlayed1].isin(previousPlayPositives)
| sample[QPlayed2].isin(previousPlayPositives)
| sample[QPlayed3].isin(previousPlayPositives)
| sample[QPlayed4].isin(previousPlayPositives)
]
len(afters)_____no_output_____
</code>
##### getGFormTemporality tinkering_____no_output_____
<code>
_GFUserId = getSurveysOfBiologists(gform)[localplayerguidkey].iloc[3]
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
sample = gform_____no_output_____answerTemporalities[1]_____no_output_____#while result != 'after':
_GFUserId = getRandomGFormGUID()
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
# returns an element of answerTemporalities
# previousPlayPositives is defined in '../Static data/English localization.ipynb'
#def getGFormRowGFormTemporality(_gformRow):
result = answerTemporalities[2]
if (_gformRow[QPlayed1] in previousPlayPositives)\
or (_gformRow[QPlayed2] in previousPlayPositives)\
or (_gformRow[QPlayed3] in previousPlayPositives)\
or (_gformRow[QPlayed4] in previousPlayPositives):
result = answerTemporalities[1]
else:
result = answerTemporalities[0]
result_____no_output_____
</code>
#### getSurveysOfUsersWhoAnsweredBoth tinkering_____no_output_____
<code>
sample = gform
gfMode = True
rmMode = False
#def getSurveysOfUsersWhoAnsweredBoth(sample, gfMode = True, rmMode = False):
befores = sample
afters = sample
if gfMode:
befores = getGFormBefores(befores)
afters = getGFormAfters(afters)
if rmMode:
befores = getRMBefores(befores)
afters = getRMAfters(afters)
result = getIntersectionUsersSurveys(befores, afters)
((len(getGFormBefores(sample)),\
len(getRMBefores(sample)),\
len(befores)),\
(len(getGFormAfters(sample)),\
len(getRMAfters(sample)),\
len(afters)),\
len(result)),\
\
((getUniqueUserCount(getGFormBefores(sample)),\
getUniqueUserCount(getRMBefores(sample)),\
getUniqueUserCount(befores)),\
(getUniqueUserCount(getGFormAfters(sample)),\
getUniqueUserCount(getRMAfters(sample)),\
getUniqueUserCount(afters)),\
getUniqueUserCount(result))
_____no_output_____len(getSurveysOfUsersWhoAnsweredBoth(gform, gfMode = True, rmMode = True)[localplayerguidkey])_____no_output_____
</code>
#### getSurveysThatAnswered tinkering_____no_output_____
<code>
sample = gform
#_GFUserId = getSurveysOfBiologists(gform)[localplayerguidkey].iloc[1]
#sample = gform[gform[localplayerguidkey] == _GFUserId]
hardPolicy = True
questionsAndPositiveAnswers = [[Q6BioEdu, biologyStudyPositives],
[Q8SynBio, yesNoIdontknowPositives],
[Q9BioBricks, yesNoIdontknowPositives]]
#def getSurveysThatAnswered(sample, questionsAndPositiveAnswers, hardPolicy = True):
filterSeries = []
if hardPolicy:
filterSeries = pd.Series(True, sample.index)
for question, positiveAnswers in questionsAndPositiveAnswers:
filterSeries = filterSeries & (sample[question].isin(positiveAnswers))
else:
filterSeries = pd.Series(False, sample.index)
for question, positiveAnswers in questionsAndPositiveAnswers:
filterSeries = filterSeries | (sample[question].isin(positiveAnswers))
result = sample[filterSeries]_____no_output_____
</code>
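A small made-up example of the hard / soft policy used above: with the hard policy every listed question must have a positive answer (AND of the filters), otherwise a single positive answer is enough (OR)._____no_output_____

<code>
# hedged sketch: invented answers, not survey rows
toyAnswers = pd.DataFrame({'Q1': ['Yes', 'No', 'Yes'], 'Q2': ['No', 'No', 'Yes']})
toyCriteria = [['Q1', ['Yes']], ['Q2', ['Yes']]]
hardFilter = pd.Series(True, toyAnswers.index)
softFilter = pd.Series(False, toyAnswers.index)
for question, positiveAnswers in toyCriteria:
    hardFilter = hardFilter & (toyAnswers[question].isin(positiveAnswers))
    softFilter = softFilter | (toyAnswers[question].isin(positiveAnswers))
len(toyAnswers[hardFilter]), len(toyAnswers[softFilter]) # (1, 2)_____no_output_____
</code>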
#### getSurveysOfBiologists tinkering_____no_output_____
<code>
sample = gform
hardPolicy = True
#def getSurveysOfBiologists(sample, hardPolicy = True):
Q6BioEdu = 'How long have you studied biology?' #biologyStudyPositives
#irrelevant QInterest 'Are you interested in biology?' #biologyInterestPositives
Q8SynBio = 'Before playing Hero.Coli, had you ever heard about synthetic biology?' #yesNoIdontknowPositives
Q9BioBricks = 'Before playing Hero.Coli, had you ever heard about BioBricks?' #yesNoIdontknowPositives
questionsAndPositiveAnswers = [[Q6BioEdu, biologyStudyPositives],
[Q8SynBio, yesNoIdontknowPositives],
[Q9BioBricks, yesNoIdontknowPositives]]
result = getSurveysThatAnswered(sample, questionsAndPositiveAnswers, hardPolicy)
print(len(result) > 0)_____no_output_____gform.index_____no_output_____len(result)_____no_output______GFUserId = getSurveysOfBiologists(gform)[localplayerguidkey].iloc[0]
sample = gform[gform[localplayerguidkey] == _GFUserId]
len(getSurveysOfBiologists(sample)) > 0_____no_output_____
</code>
#### getSurveysOfGamers tinkering_____no_output_____
<code>
sample = gform
hardPolicy = True
#def getSurveysOfGamers(sample, hardPolicy = True):
Q2Interest = 'Are you interested in video games?' #interestPositives
Q3Play = 'Do you play video games?' #frequencyPositives
questionsAndPositiveAnswers = [[Q2Interest, interestPositives], [Q3Play, frequencyPositives]]
result = getSurveysThatAnswered(sample, questionsAndPositiveAnswers, hardPolicy)_____no_output_____len(result)_____no_output_____type(filterSeries)_____no_output_____len(afters[afters[QPlayed1].isin(previousPlayPositives)
| afters[QPlayed2].isin(previousPlayPositives)
| afters[QPlayed3].isin(previousPlayPositives)
| afters[QPlayed4].isin(previousPlayPositives)
]),\
len(afters[afters[QPlayed1].isin(previousPlayPositives)]),\
len(afters[afters[QPlayed2].isin(previousPlayPositives)]),\
len(afters[afters[QPlayed3].isin(previousPlayPositives)]),\
len(afters[afters[QPlayed4].isin(previousPlayPositives)])_____no_output_____
</code>
#### getSurveysWithMatchingAnswers tinkering_____no_output_____
<code>
_GFUserId = getSurveysOfBiologists(gform)[localplayerguidkey].iloc[2]
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
sample = gform_____no_output_____sample = gform
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
hardPolicy = False
Q4 = 'How old are you?'
Q5 = 'What is your gender?'
Q2Interest = 'Are you interested in video games?'
Q3Play = 'Do you play video games?'
Q6BioEdu = 'How long have you studied biology?'
Q7BioInterest = 'Are you interested in biology?'
Q8SynBio = 'Before playing Hero.Coli, had you ever heard about synthetic biology?'
Q9BioBricks = 'Before playing Hero.Coli, had you ever heard about BioBricks?'
Q42 = 'Language'
strictList = [Q4, Q5]
extendedList = [Q2Interest, Q3Play, Q6BioEdu, Q8SynBio, Q9BioBricks, Q42]
#def getSurveysWithMatchingAnswers(sample, _gformRow, strictList, extendedList = [], hardPolicy = False):
questions = strictList
if (hardPolicy):
questions += extendedList
questionsAndPositiveAnswers = []
for q in questions:
questionsAndPositiveAnswers.append([q, [_gformRow[q]]])
getSurveysThatAnswered(sample, questionsAndPositiveAnswers, True)_____no_output_____
</code>
#### getMatchingDemographics tinkering_____no_output_____
<code>
sample = gform
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
hardPolicy = True
#def getMatchingDemographics(sample, _gformRow, hardPolicy = False):
# age and gender
Q4 = 'How old are you?'
Q5 = 'What is your gender?'
# interests, hobbies, and knowledge - evaluation may vary after playing
Q2Interest = 'Are you interested in video games?'
Q3Play = 'Do you play video games?'
Q6BioEdu = 'How long have you studied biology?'
Q7BioInterest = 'Are you interested in biology?'
Q8SynBio = 'Before playing Hero.Coli, had you ever heard about synthetic biology?'
Q9BioBricks = 'Before playing Hero.Coli, had you ever heard about BioBricks?'
# language may vary: players may have missed the opportunity to set it, or may want to try and change it
Q42 = 'Language'
getSurveysWithMatchingAnswers(
sample,
_gformRow, [Q4, Q5],
extendedList = [Q2Interest, Q3Play, Q6BioEdu, Q8SynBio, Q9BioBricks, Q42],
hardPolicy = hardPolicy
)_____no_output_____questionsAndPositiveAnswers_____no_output_____
</code>
#### getGFormRowCorrection tinkering_____no_output_____
<code>
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
_source = correctAnswers
#def getGFormRowCorrection( _gformRow, _source = correctAnswers):
result = _gformRow.copy()
if(len(_gformRow) == 0):
print("this gform row is empty")
else:
result = pd.Series(index = _gformRow.index, data = np.full(len(_gformRow), np.nan))
for question in result.index:
_correctAnswers = _source.loc[question]
if(len(_correctAnswers) > 0):
result.loc[question] = False
for _correctAnswer in _correctAnswers:
if str(_gformRow.loc[question]).startswith(str(_correctAnswer)):
result.loc[question] = True
break
result_____no_output_____
</code>
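A micro-example of the correction rule applied above: an answer counts as correct as soon as it starts with one of the reference answers for that question. The answers and reference list below are invented, not the notebook's correctAnswers._____no_output_____

<code>
# hedged sketch: toy answers and toy reference, not correctAnswers
toyRow = pd.Series({'Q1': 'Yes, definitely', 'Q2': 'No'})
toyReference = {'Q1': ['Yes'], 'Q2': ['Yes']}
toyCorrection = pd.Series(index = toyRow.index, data = np.full(len(toyRow), np.nan))
for question in toyCorrection.index:
    toyCorrection.loc[question] = False
    for toyAnswer in toyReference[question]:
        if str(toyRow.loc[question]).startswith(str(toyAnswer)):
            toyCorrection.loc[question] = True
            break
toyCorrection # Q1 True, Q2 False_____no_output_____
</code>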
#### getGFormRowScore tinkering_____no_output_____
<code>
_gformRow = gform[gform[localplayerguidkey] == _GFUserId].iloc[0]
_source = correctAnswers
#def getGFormRowScore( _gformRow, _source = correctAnswers):
correction = getGFormRowCorrection( _gformRow, _source = _source)
_counts = correction.value_counts()
_thisScore = 0
if(True in _counts):
_thisScore = _counts[True]
_thisScore_____no_output_____
</code>
#### getGFormDataPreview tinkering_____no_output_____
<code>
_GFUserId = getSurveysOfBiologists(gform)[localplayerguidkey].iloc[2]
sample = gform
# for per-gform, manual analysis
#def getGFormDataPreview(_GFUserId, sample):
gforms = gform[gform[localplayerguidkey] == _GFUserId]
result = {}
for _ilocIndex in range(0, len(gforms)):
gformsIndex = gforms.index[_ilocIndex]
currentGForm = gforms.iloc[_ilocIndex]
subresult = {}
subresult['date'] = currentGForm['Timestamp']
subresult['temporality RM'] = currentGForm['Temporality']
subresult['temporality GF'] = getGFormRowGFormTemporality(currentGForm)
subresult['score'] = getGFormRowScore(currentGForm)
subresult['genderAge'] = [currentGForm['What is your gender?'], currentGForm['How old are you?']]
# search for other users with similar demographics
matchingDemographics = getMatchingDemographics(sample, currentGForm)
matchingDemographicsIds = []
#print(type(matchingDemographics))
#print(matchingDemographics.index)
for matchesIndex in matchingDemographics.index:
matchingDemographicsIds.append([matchesIndex, matchingDemographics.loc[matchesIndex, localplayerguidkey]])
subresult['demographic matches'] = matchingDemographicsIds
result['survey' + str(_ilocIndex)] = subresult
print(result)_____no_output_____for match in result['survey0']['demographic matches']:
print(match[0])_____no_output_____
</code>
| {
"repository": "CyberCRI/herocoli-metrics-redwire",
"path": "v1.52/Tests/1.3 Google form analysis visualizations.ipynb",
"matched_keywords": [
"synthetic biology",
"biology"
],
"stars": 1,
"size": 43273,
"hexsha": "d0e150ac7c38a82487d22dd68aea05c6d195ce1d",
"max_line_length": 149,
"avg_line_length": 33.7542901716,
"alphanum_fraction": 0.5864626904
} |
# Notebook from shiwei23/STORM6
Path: jupyter/Generate_dave_xml_DNA_IDEX.ipynb
<code>
from xml.etree import ElementTree
from xml.dom import minidom
from xml.etree.ElementTree import Element, SubElement, Comment, indent
def prettify(elem):
"""Return a pretty-printed XML string for the Element.
"""
rough_string = ElementTree.tostring(elem, encoding="ISO-8859-1")
reparsed = minidom.parseString(rough_string)
return reparsed.toprettyxml(indent="\t")_____no_output_____import numpy as np
import os
valve_start = 1
hyb_start = 51
reg_start = 1
num_rounds = 14
data_type = 'U'
valve_ids = np.arange(valve_start, valve_start + num_rounds)
hyb_ids = np.arange(hyb_start, hyb_start + num_rounds)
reg_names = [f'{data_type}{_i}' for _i in np.arange(reg_start, reg_start + num_rounds)]_____no_output_____source_folder = r'D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229'
#target_drive = r'\\KOLMOGOROV\Chromatin_NAS_4'
target_drive = r'\\10.245.74.158\Chromatin_NAS_1'
# imaging protocol
imaging_protocol = r'Zscan_750_647_561_s50_n250_10Hz'
# reference imaging protocol
add_ref_hyb = False
ref_imaging_protocol = r'Zscan_750_647_561_405_s50_n250_10Hz'
ref_hyb = 0
# bleach protocol
bleach = True
bleach_protocol = r'Bleach_750_647_s5'_____no_output_____cmd_seq = Element('command_sequence')
if add_ref_hyb:
# add hyb 0
# comments
comment = Comment(f"Hyb 0")
cmd_seq.append(comment)
# flow imaging buffer
imaging = SubElement(cmd_seq, 'valve_protocol')
imaging.text = f"Flow Imaging Buffer"
# change directory
change_dir = SubElement(cmd_seq, 'change_directory')
change_dir.text = os.path.join(source_folder, f"H0C1")
# wakeup
wakeup = SubElement(cmd_seq, 'wakeup')
wakeup.text = "5000"
# imaging loop
_im_p = ref_imaging_protocol
loop = SubElement(cmd_seq, 'loop', name='Position Loop Zscan', increment="name")
loop_item = SubElement(loop, 'item', name=_im_p)
loop_item.text = " "
# delay time
delay = SubElement(cmd_seq, 'delay')
delay.text = "2000"
# copy folder
copy_dir = SubElement(cmd_seq, 'copy_directory')
source_dir = SubElement(copy_dir, 'source_path')
source_dir.text = change_dir.text
target_dir = SubElement(copy_dir, 'target_path')
target_dir.text = os.path.join(target_drive,
os.path.basename(os.path.dirname(source_dir.text)),
os.path.basename(source_dir.text))
del_source = SubElement(copy_dir, 'delete_source')
del_source.text = "True"
for _i, (_vid, _hid, _rname) in enumerate(zip(valve_ids, hyb_ids, reg_names)):
# select protocol
_im_p = imaging_protocol
# TCEP
tcep = SubElement(cmd_seq, 'valve_protocol')
tcep.text = "Flow TCEP"
# wash tcep
tcep_wash = SubElement(cmd_seq, 'valve_protocol')
tcep_wash.text = "Flow Wash Buffer"
# comments
comment = Comment(f"Hyb {_hid} with {_vid} for {_rname}")
cmd_seq.append(comment)
# flow adaptor
adt = SubElement(cmd_seq, 'valve_protocol')
adt.text = f"Hybridize {_vid}"
if bleach:
# delay time
delay = SubElement(cmd_seq, 'delay')
delay.text = "3000"
# change directory
bleach_change_dir = SubElement(cmd_seq, 'change_directory')
bleach_change_dir.text = os.path.join(source_folder, f"Bleach")
# wakeup
bleach_wakeup = SubElement(cmd_seq, 'wakeup')
bleach_wakeup.text = "5000"
# imaging loop
bleach_loop = SubElement(cmd_seq, 'loop', name='Position Loop Zscan', increment="name")
bleach_loop_item = SubElement(bleach_loop, 'item', name=bleach_protocol)
bleach_loop_item.text = " "
# delay time
delay = SubElement(cmd_seq, 'delay')
delay.text = "3000"
else:
# delay time
adt_incubation = SubElement(cmd_seq, 'valve_protocol')
adt_incubation.text = f"Incubate 10min"
# wash
wash = SubElement(cmd_seq, 'valve_protocol')
wash.text = "Flow Wash Buffer"
# readouts
readouts = SubElement(cmd_seq, 'valve_protocol')
readouts.text = "Flow RNA common readouts"
# incubate readouts
readout_incubation = SubElement(cmd_seq, 'valve_protocol')
readout_incubation.text = f"Incubate 10min"
# wash readouts
readout_wash = SubElement(cmd_seq, 'valve_protocol')
readout_wash.text = f"Flow Wash Buffer"
# flow imaging buffer
imaging = SubElement(cmd_seq, 'valve_protocol')
imaging.text = f"Flow Imaging Buffer"
# change directory
change_dir = SubElement(cmd_seq, 'change_directory')
change_dir.text = os.path.join(source_folder, f"H{_hid}{_rname.upper()}")
# wakeup
wakeup = SubElement(cmd_seq, 'wakeup')
wakeup.text = "5000"
# imaging loop
loop = SubElement(cmd_seq, 'loop', name='Position Loop Zscan', increment="name")
loop_item = SubElement(loop, 'item', name=_im_p)
loop_item.text = " "
# delay time
delay = SubElement(cmd_seq, 'delay')
delay.text = "2000"
# copy folder
copy_dir = SubElement(cmd_seq, 'copy_directory')
source_dir = SubElement(copy_dir, 'source_path')
source_dir.text = change_dir.text#cmd_seq.findall('change_directory')[-1].text
target_dir = SubElement(copy_dir, 'target_path')
target_dir.text = os.path.join(target_drive,
os.path.basename(os.path.dirname(source_dir.text)),
os.path.basename(source_dir.text))
del_source = SubElement(copy_dir, 'delete_source')
del_source.text = "True"
# empty line
indent(target_dir)
final_str = prettify(cmd_seq)
print( final_str )<?xml version="1.0" ?>
<command_sequence>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 51 with 1 for U1-->
<valve_protocol>Hybridize 1</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H51U1</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H51U1</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H51U1</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 52 with 2 for U2-->
<valve_protocol>Hybridize 2</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H52U2</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H52U2</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H52U2</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 53 with 3 for U3-->
<valve_protocol>Hybridize 3</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H53U3</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H53U3</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H53U3</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 54 with 4 for U4-->
<valve_protocol>Hybridize 4</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H54U4</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H54U4</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H54U4</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 55 with 5 for U5-->
<valve_protocol>Hybridize 5</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H55U5</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H55U5</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H55U5</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 56 with 6 for U6-->
<valve_protocol>Hybridize 6</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H56U6</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H56U6</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H56U6</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 57 with 7 for U7-->
<valve_protocol>Hybridize 7</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H57U7</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H57U7</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H57U7</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 58 with 8 for U8-->
<valve_protocol>Hybridize 8</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H58U8</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H58U8</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H58U8</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 59 with 9 for U9-->
<valve_protocol>Hybridize 9</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H59U9</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H59U9</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H59U9</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 60 with 10 for U10-->
<valve_protocol>Hybridize 10</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H60U10</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H60U10</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H60U10</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 61 with 11 for U11-->
<valve_protocol>Hybridize 11</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H61U11</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H61U11</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H61U11</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 62 with 12 for U12-->
<valve_protocol>Hybridize 12</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H62U12</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H62U12</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H62U12</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 63 with 13 for U13-->
<valve_protocol>Hybridize 13</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H63U13</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H63U13</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H63U13</target_path>
<delete_source>True</delete_source>
</copy_directory>
<valve_protocol>Flow TCEP</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<!--Hyb 64 with 14 for U14-->
<valve_protocol>Hybridize 14</valve_protocol>
<delay>3000</delay>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\Bleach</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Bleach_750_647_s5"> </item>
</loop>
<delay>3000</delay>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow RNA common readouts</valve_protocol>
<valve_protocol>Incubate 10min</valve_protocol>
<valve_protocol>Flow Wash Buffer</valve_protocol>
<valve_protocol>Flow Imaging Buffer</valve_protocol>
<change_directory>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H64U14</change_directory>
<wakeup>5000</wakeup>
<loop name="Position Loop Zscan" increment="name">
<item name="Zscan_750_647_561_s50_n250_10Hz"> </item>
</loop>
<delay>2000</delay>
<copy_directory>
<source_path>D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\H64U14</source_path>
<target_path>\\10.245.74.158\Chromatin_NAS_1\20220102-CTP11-1000_CTP12-DNA_from_1229\H64U14</target_path>
<delete_source>True</delete_source>
</copy_directory>
</command_sequence>
</code>
# save this xml_____no_output_____
<code>
save_filename = os.path.join(source_folder, f"generated_dave_H{hyb_start}-{hyb_start+num_rounds-1}.txt")
with open(save_filename, 'w') as _output_handle:
print(save_filename)
_output_handle.write(final_str)
D:\Pu\20220102-CTP11-1000_CTP12-DNA_from_1229\generated_dave_H51-64.txt
</code>
| {
"repository": "shiwei23/STORM6",
"path": "jupyter/Generate_dave_xml_DNA_IDEX.ipynb",
"matched_keywords": [
"RNA"
],
"stars": 1,
"size": 31236,
"hexsha": "d0e150e572b0082c0c8c769b2ec6ea60a7e1d999",
"max_line_length": 125,
"avg_line_length": 46.6208955224,
"alphanum_fraction": 0.6119221411
} |
# Notebook from NinelK/course-content
Path: tutorials/W3D2_DynamicNetworks/W3D2_Tutorial1.ipynb
<a href="https://colab.research.google.com/github/NeuromatchAcademy/course-content/blob/master/tutorials/W3D2_DynamicNetworks/W3D2_Tutorial1.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____# Neuromatch Academy: Week 3, Day 2, Tutorial 1
# Neuronal Network Dynamics: Neural Rate Models
__Content creators:__ Qinglong Gu, Songtin Li, Arvind Kumar, John Murray, Julijana Gjorgjieva
__Content reviewers:__ Spiros Chavlis, Lorenzo Fontolan, Richard Gao, Maryam Vaziri-Pashkam, Michael Waskom
_____no_output_____---
# Tutorial Objectives
The brain is a complex system, not because it is composed of a large number of diverse types of neurons, but mainly because of how neurons are connected to each other. The brain is indeed a network of highly specialized neuronal networks.
The activity of a neural network constantly evolves in time. For this reason, neurons can be modeled as dynamical systems. The dynamical system approach is only one of the many modeling approaches that computational neuroscientists have developed (other points of view include information processing, statistical models, etc.).
How the dynamics of neuronal networks affect the representation and processing of information in the brain is an open question. However, signatures of altered brain dynamics present in many brain diseases (e.g., in epilepsy or Parkinson's disease) tell us that it is crucial to study network activity dynamics if we want to understand the brain.
In this tutorial, we will simulate and study one of the simplest models of biological neuronal networks. Instead of modeling and simulating individual excitatory neurons (e.g., LIF models that you implemented yesterday), we will treat them as a single homogeneous population and approximate their dynamics using a single one-dimensional equation describing the evolution of their average spiking rate in time.
In this tutorial, we will learn how to build a firing rate model of a single population of excitatory neurons.
**Steps:**
- Write the equation for the firing rate dynamics of a 1D excitatory population.
- Visualize the response of the population as a function of parameters such as threshold level and gain, using the frequency-current (F-I) curve.
- Numerically simulate the dynamics of the excitatory population and find the fixed points of the system.
- Investigate the stability of the fixed points by linearizing the dynamics around them.
_____no_output_____---
# Setup_____no_output_____
<code>
# Imports
import matplotlib.pyplot as plt
import numpy as np
import scipy.optimize as opt # root-finding algorithm_____no_output_____# @title Figure Settings
import ipywidgets as widgets # interactive display
%config InlineBackend.figure_format = 'retina'
plt.style.use("https://raw.githubusercontent.com/NeuromatchAcademy/course-content/master/nma.mplstyle")_____no_output_____# @title Helper functions
def plot_fI(x, f):
plt.figure(figsize=(6, 4)) # plot the figure
plt.plot(x, f, 'k')
plt.xlabel('x (a.u.)', fontsize=14)
plt.ylabel('F(x)', fontsize=14)
plt.show()
def plot_dr_r(r, drdt):
plt.figure()
plt.plot(r, drdt, 'k')
plt.plot(r, 0. * r, 'k--')
plt.xlabel(r'$r$')
plt.ylabel(r'$\frac{dr}{dt}$', fontsize=20)
plt.ylim(-0.1, 0.1)
def plot_dFdt(x, dFdt):
plt.figure()
plt.plot(x, dFdt, 'r')
plt.xlabel('x (a.u.)', fontsize=14)
plt.ylabel('dF(x)', fontsize=14)
plt.show()_____no_output_____
</code>
# Section 1: Neuronal network dynamics_____no_output_____
<code>
# @title Video 1: Dynamic networks
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="p848349hPyw", width=854, height=480, fs=1)
print("Video available at https://youtube.com/watch?v=" + video.id)
videoVideo available at https://youtube.com/watch?v=p848349hPyw
</code>
## Section 1.1: Dynamics of a single excitatory population
Individual neurons respond by spiking. When we average the spikes of neurons in a population, we can define the average firing activity of the population. In this model, we are interested in how the population-averaged firing varies as a function of time and network parameters. Mathematically, we can describe the firing rate dynamic as:
\begin{align}
\tau \frac{dr}{dt} &= -r + F(w\cdot r + I_{\text{ext}}) \quad\qquad (1)
\end{align}
$r(t)$ represents the average firing rate of the excitatory population at time $t$, $\tau$ controls the timescale of the evolution of the average firing rate, $w$ denotes the strength (synaptic weight) of the recurrent input to the population, $I_{\text{ext}}$ represents the external input, and the transfer function $F(\cdot)$ (which can be related to f-I curve of individual neurons described in the next sections) represents the population activation function in response to all received inputs.
To start building the model, please execute the cell below to initialize the simulation parameters._____no_output_____
<code>
# @title Default parameters for a single excitatory population model
def default_pars_single(**kwargs):
pars = {}
# Excitatory parameters
pars['tau'] = 1. # Timescale of the E population [ms]
pars['a'] = 1.2 # Gain of the E population
pars['theta'] = 2.8 # Threshold of the E population
# Connection strength
pars['w'] = 0. # E to E, we first set it to 0
# External input
pars['I_ext'] = 0.
# simulation parameters
pars['T'] = 20. # Total duration of simulation [ms]
pars['dt'] = .1 # Simulation time step [ms]
pars['r_init'] = 0.2 # Initial value of E
# External parameters if any
for k in kwargs:
pars[k] = kwargs[k]
# Vector of discretized time points [ms]
pars['range_t'] = np.arange(0, pars['T'], pars['dt'])
return pars
_____no_output_____
</code>
You can use:
- `pars = default_pars_single()` to get all the parameters, and then you can execute `print(pars)` to check these parameters.
- `pars = default_pars_single(T=T_sim, dt=time_step)` to set new simulation time and time step
- After `pars = default_pars_single()`, use `pars['New_para'] = value` to add a new parameter with its value_____no_output_____## Section 1.2: F-I curves
In electrophysiology, a neuron is often characterized by its spike rate output in response to input currents. This is often called the **F-I** curve, denoting the output spike frequency (**F**) in response to different injected currents (**I**). We estimated this for an LIF neuron in yesterday's tutorial.
The transfer function $F(\cdot)$ in Equation $1$ represents the gain of the population as a function of the total input. The gain is often modeled as a sigmoidal function, i.e., more input drive leads to a nonlinear increase in the population firing rate. The output firing rate will eventually saturate for high input values.
A sigmoidal $F(\cdot)$ is parameterized by its gain $a$ and threshold $\theta$.
$$ F(x;a,\theta) = \frac{1}{1+\text{e}^{-a(x-\theta)}} - \frac{1}{1+\text{e}^{a\theta}} \quad(2)$$
The argument $x$ represents the input to the population. Note that the second term is chosen so that $F(0;a,\theta)=0$.
Many other transfer functions (generally monotonic) can also be used. Examples are the rectified linear function $ReLU(x)$ or the hyperbolic tangent $tanh(x)$._____no_output_____### Exercise 1: Implement F-I curve
Let's first investigate the activation functions before simulating the dynamics of the entire population.
In this exercise, you will implement a sigmoidal **F-I** curve or transfer function $F(x)$, with gain $a$ and threshold level $\theta$ as parameters._____no_output_____
<code>
def F(x, a, theta):
"""
Population activation function.
Args:
x (float): the population input
a (float): the gain of the function
theta (float): the threshold of the function
Returns:
float: the population activation response F(x) for input x
"""
#################################################
## TODO for students: compute f = F(x) ##
# Fill out function and remove
raise NotImplementedError("Student excercise: implement the f-I function")
#################################################
# add the expression of f = F(x)
f = ...
return f
pars = default_pars_single() # get default parameters
x = np.arange(0, 10, .1) # set the range of input
# Uncomment below to test your function
# f = F(x, pars['a'], pars['theta'])
# plot_fI(x, f)_____no_output_____# to_remove solution
def F(x, a, theta):
"""
Population activation function.
Args:
x (float): the population input
a (float): the gain of the function
theta (float): the threshold of the function
Returns:
float: the population activation response F(x) for input x
"""
# add the expression of f = F(x)
f = (1 + np.exp(-a * (x - theta)))**-1 - (1 + np.exp(a * theta))**-1
return f
pars = default_pars_single() # get default parameters
x = np.arange(0, 10, .1) # set the range of input
# Uncomment below to test your function
f = F(x, pars['a'], pars['theta'])
with plt.xkcd():
plot_fI(x, f)_____no_output_____
</code>
### Interactive Demo: Parameter exploration of F-I curve
Here's an interactive demo that shows how the F-I curve is changing for different values of the gain and threshold parameters.
**Remember to enable the demo by running the cell.**_____no_output_____
<code>
# @title
# @markdown Make sure you execute this cell to enable the widget!
def interactive_plot_FI(a, theta):
"""
Population activation function.
Expects:
a : the gain of the function
theta : the threshold of the function
Returns:
plot the F-I curve with the given parameters
"""
# set the range of input
x = np.arange(0, 10, .1)
plt.figure()
plt.plot(x, F(x, a, theta), 'k')
plt.xlabel('x (a.u.)', fontsize=14)
plt.ylabel('F(x)', fontsize=14)
plt.show()
_ = widgets.interact(interactive_plot_FI, a=(0.3, 3, 0.3), theta=(2, 4, 0.2))_____no_output_____
</code>
## Section 1.3: Simulation scheme of E dynamics
Because $F(\cdot)$ is a nonlinear function, the exact solution of Equation $1$ can not be determined via analytical methods. Therefore, numerical methods must be used to find the solution. In practice, the derivative on the left-hand side of Equation $1$ can be approximated using the Euler method on a time-grid of stepsize $\Delta t$:
\begin{align}
&\frac{dr}{dt} \approx \frac{r[k+1]-r[k]}{\Delta t}
\end{align}
where $r[k] = r(k\Delta t)$.
Thus,
$$\Delta r[k] = \frac{\Delta t}{\tau}[-r[k] + F(w\cdot r[k] + I_{\text{ext}}[k];a,\theta)]$$
Hence, Equation (1) is updated at each time step by:
$$r[k+1] = r[k] + \Delta r[k]$$
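In code, one iteration of this update rule looks like the following minimal sketch (it uses the `F` and `default_pars_single` defined above, and is essentially what the `simulate_single` helper provided in the next cell implements):
```python
# Minimal Euler-update sketch (illustration only)
pars = default_pars_single()
r = np.zeros(pars['range_t'].size)   # container for r[k]
r[0] = pars['r_init']                # initial condition
for k in range(r.size - 1):
    # dr[k] = (dt / tau) * (-r[k] + F(w * r[k] + I_ext))
    dr = pars['dt'] / pars['tau'] * (-r[k] + F(pars['w'] * r[k] + pars['I_ext'],
                                               pars['a'], pars['theta']))
    r[k + 1] = r[k] + dr
```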
**_Please execute the following cell to enable the WC simulator_**_____no_output_____
<code>
# @title Single population rate model simulator: `simulate_single`
def simulate_single(pars):
"""
Simulate an excitatory population of neurons
Args:
pars : Parameter dictionary
Returns:
rE : Activity of excitatory population (array)
Example:
pars = default_pars_single()
r = simulate_single(pars)
"""
# Set parameters
tau, a, theta = pars['tau'], pars['a'], pars['theta']
w = pars['w']
I_ext = pars['I_ext']
r_init = pars['r_init']
dt, range_t = pars['dt'], pars['range_t']
Lt = range_t.size
# Initialize activity
r = np.zeros(Lt)
r[0] = r_init
I_ext = I_ext * np.ones(Lt)
# Update the E activity
for k in range(Lt - 1):
dr = dt / tau * (-r[k] + F(w * r[k] + I_ext[k], a, theta))
r[k+1] = r[k] + dr
return r
print(help(simulate_single))Help on function simulate_single in module __main__:
simulate_single(pars)
Simulate an excitatory population of neurons
Args:
pars : Parameter dictionary
Returns:
rE : Activity of excitatory population (array)
Example:
pars = default_pars_single()
r = simulate_single(pars)
None
</code>
### Interactive Demo: Parameter Exploration of single population dynamics
Note that $w=0$, as in the default setting, means no recurrent input to the neuron population in Equation (1). Hence, the dynamics is entirely determined by the external input $I_{\text{ext}}$. Try to explore how $r_{\text{sim}}(t)$ changes with different $I_{\text{ext}}$ and $\tau$ parameter values, and investigate the relationship between $F(I_{\text{ext}}; a, \theta)$ and the steady value of $r(t)$. Note that, $r_{\rm ana}(t)$ denotes the analytical solution._____no_output_____
<code>
# @title
# @markdown Make sure you execute this cell to enable the widget!
# get default parameters
pars = default_pars_single(T=20.)
def Myplot_E_diffI_difftau(I_ext, tau):
# set external input and time constant
pars['I_ext'] = I_ext
pars['tau'] = tau
# simulation
r = simulate_single(pars)
# Analytical Solution
r_ana = (pars['r_init']
+ (F(I_ext, pars['a'], pars['theta'])
- pars['r_init']) * (1. - np.exp(-pars['range_t'] / pars['tau'])))
# plot
plt.figure()
plt.plot(pars['range_t'], r, 'b', label=r'$r_{\mathrm{sim}}$(t)', alpha=0.5,
zorder=1)
plt.plot(pars['range_t'], r_ana, 'b--', lw=5, dashes=(2, 2),
label=r'$r_{\mathrm{ana}}$(t)', zorder=2)
plt.plot(pars['range_t'],
F(I_ext, pars['a'], pars['theta']) * np.ones(pars['range_t'].size),
'k--', label=r'$F(I_{\mathrm{ext}})$')
plt.xlabel('t (ms)', fontsize=16.)
plt.ylabel('Activity r(t)', fontsize=16.)
plt.legend(loc='best', fontsize=14.)
plt.show()
_ = widgets.interact(Myplot_E_diffI_difftau, I_ext=(0.0, 10., 1.),
tau=(1., 5., 0.2))_____no_output_____
</code>
## Think!
Above, we have numerically solved a system driven by a positive input and that, if $w_{EE} \neq 0$, receives an excitatory recurrent input (**try changing the value of $w_{EE}$ to a positive number**). Yet, $r_E(t)$ either decays to zero or reaches a fixed non-zero value.
- Why doesn't the solution of the system "explode" in a finite time? In other words, what guarantees that $r_E$(t) stays finite?
- Which parameter would you change in order to increase the maximum value of the response? _____no_output_____# Section 2: Fixed points of the single population system
_____no_output_____
<code>
# @title Video 2: Fixed point
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="Ox3ELd1UFyo", width=854, height=480, fs=1)
print("Video available at https://youtube.com/watch?v=" + video.id)
videoVideo available at https://youtube.com/watch?v=Ox3ELd1UFyo
</code>
As you varied the two parameters in the last Interactive Demo, you noticed that, while at first the system output quickly changes, with time, it reaches its maximum/minimum value and does not change anymore. The value eventually reached by the system is called the **steady state** of the system, or the **fixed point**. Essentially, in the steady states the derivative with respect to time of the activity ($r$) is zero, i.e. $\displaystyle \frac{dr}{dt}=0$.
We can find the steady state of Equation (1) by setting $\displaystyle{\frac{dr}{dt}=0}$ and solving for $r$:
$$-r_{\text{steady}} + F(w\cdot r_{\text{steady}} + I_{\text{ext}};a,\theta) = 0, \qquad (3)$$
When it exists, the solution of Equation (3) defines a **fixed point** of the dynamical system in Equation (1). Note that if $F(x)$ is nonlinear, it is not always possible to find an analytical solution, but the solution can be found via numerical simulations, as we will do later.
From the Interactive Demo, one could also notice that the value of $\tau$ influences how quickly the activity will converge to the steady state from its initial value.
In the specific case of $w=0$, we can also analytically compute the solution of Equation (1) (i.e., the thick blue dashed line) and deduce the role of $\tau$ in determining the convergence to the fixed point:
$$r(t) = \big[F(I_{\text{ext}};a,\theta) - r(t=0)\big]\left(1-\text{e}^{-\frac{t}{\tau}}\right) + r(t=0)$$
We can now numerically calculate the fixed point with the `scipy.optimize.root` function.
<font size=3><font color='gray'>_(Recall that at the very beginning, we `import scipy.optimize as opt` )_</font></font>.
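As a generic illustration of the API (not specific to our rate model), `opt.root` takes a function and an initial guess and returns an object whose `.x` attribute holds the root:
```python
import scipy.optimize as opt                 # already imported above as `opt`
sol = opt.root(lambda x: x**2 - 2., x0=1.)   # find a root of x^2 - 2
print(sol.x)                                 # approximately [1.41421356]
```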
\\
Please execute the cell below to define the functions `my_fp_single` and `check_fp_single`_____no_output_____
<code>
# @title Function of calculating the fixed point
# @markdown Make sure you execute this cell to enable the function!
def my_fp_single(pars, r_init):
"""
Calculate the fixed point through drE/dt=0
Args:
pars : Parameter dictionary
r_init : Initial value used for scipy.optimize function
Returns:
x_fp : value of fixed point
"""
# get the parameters
a, theta = pars['a'], pars['theta']
w = pars['w']
I_ext = pars['I_ext']
# define the right hand of E dynamics
def my_WCr(x):
r = x
drdt = (-r + F(w * r + I_ext, a, theta))
y = np.array(drdt)
return y
x0 = np.array(r_init)
x_fp = opt.root(my_WCr, x0).x
return x_fp
print(help(my_fp_single))
def check_fp_single(pars, x_fp, mytol=1e-4):
"""
Verify |dr/dt| < mytol
Args:
pars : Parameter dictionary
fp : value of fixed point
mytol : tolerance, default as 10^{-4}
Returns :
Whether it is a correct fixed point: True/False
"""
a, theta = pars['a'], pars['theta']
w = pars['w']
I_ext = pars['I_ext']
# calculate Equation(3)
y = x_fp - F(w * x_fp + I_ext, a, theta)
# Here we set tolerance as 10^{-4}
return np.abs(y) < mytol
print(help(check_fp_single))Help on function my_fp_single in module __main__:
my_fp_single(pars, r_init)
Calculate the fixed point through drE/dt=0
Args:
pars : Parameter dictionary
r_init : Initial value used for scipy.optimize function
Returns:
x_fp : value of fixed point
None
Help on function check_fp_single in module __main__:
check_fp_single(pars, x_fp, mytol=0.0001)
Verify |dr/dt| < mytol
Args:
pars : Parameter dictionary
fp : value of fixed point
mytol : tolerance, default as 10^{-4}
Returns :
Whether it is a correct fixed point: True/False
None
</code>
## Exercise 2: Visualization of the fixed point
When it is not possible to find the solution of Equation (3) analytically, a graphical approach can be taken. To that end, it is useful to plot $\displaystyle{\frac{dr}{dt}}$ as a function of $r$. The values of $r$ for which the plotted function crosses zero on the y axis correspond to fixed points.
Here, let us, for example, set $w=5.0$ and $I^{\text{ext}}=0.5$. From Equation (1), you can obtain
$$\frac{dr}{dt} = [-r + F(w\cdot r + I^{\text{ext}})]/\tau $$
Then, plot the $dr/dt$ as a function of $r$, and check for the presence of fixed points. _____no_output_____Finally, try to find the fixed points using the previously defined function `my_fp_single(pars, r_init)` with proper initial values ($r_{\text{init}}$). You can use the previously defined function `check_fp_single(pars, x_fp)` to verify that the values of $r_{\rm fp}$ for which $\displaystyle{\frac{dr}{dt}} = 0$ are the true fixed points. From the line $\displaystyle{\frac{dr}{dt}}$ plotted above, the proper initial values can be chosen as the values close to where the line crosses zero on the y axis (real fixed point)._____no_output_____
<code>
pars = default_pars_single() # get default parameters
# set your external input and wEE
pars['I_ext'] = 0.5
pars['w'] = 5.0
r = np.linspace(0, 1, 1000) # give the values of r
# Calculate drEdt
# drdt = ...
# Uncomment this to plot the drdt across r
# plot_dr_r(r, drdt)
################################################################
# TODO for students:
# Find the values close to the intersections of drdt and y=0
# as your initial values
# Calculate the fixed point with your initial value, verify them,
# and plot the correct ones
# check if x_fp is the intersection of the lines with the given function
# check_fpE(pars, x_fp)
# vary different initial values to find the correct fixed point (Should be 3)
# Use blue, red and yellow colors, respectively ('b', 'r', 'y' codenames)
################################################################
# Calculate the first fixed point with your initial value
# x_fp_1 = my_fp_single(pars, ...)
# if check_fp_single(pars, x_fp_1):
# plt.plot(x_fp_1, 0, 'bo', ms=8)
# Calculate the second fixed point with your initial value
# x_fp_2 = my_fp_single(pars, ...)
# if check_fp_single(pars, x_fp_2):
# plt.plot(x_fp_2, 0, 'ro', ms=8)
# Calculate the third fixed point with your initial value
# x_fp_3 = my_fp_single(pars, ...)
# if check_fp_single(pars, x_fp_3):
# plt.plot(x_fp_3, 0, 'yo', ms=8)_____no_output_____# to_remove solution
pars = default_pars_single() # get default parameters
# set your external input and wEE
pars['I_ext'] = 0.5
pars['w'] = 5.0
r = np.linspace(0, 1, 1000) # give the values of r
# Calculate drEdt
drdt = (-r + F(pars['w'] * r + pars['I_ext'],
pars['a'], pars['theta'])) / pars['tau']
with plt.xkcd():
plot_dr_r(r, drdt)
# Calculate the first fixed point with your initial value
x_fp_1 = my_fp_single(pars, 0.)
if check_fp_single(pars, x_fp_1):
plt.plot(x_fp_1, 0, 'bo', ms=8)
# Calculate the second fixed point with your initial value
x_fp_2 = my_fp_single(pars, 0.4)
if check_fp_single(pars, x_fp_2):
plt.plot(x_fp_2, 0, 'ro', ms=8)
# Calculate the third fixed point with your initial value
x_fp_3 = my_fp_single(pars, 0.9)
if check_fp_single(pars, x_fp_3):
plt.plot(x_fp_3, 0, 'yo', ms=8)
plt.show()_____no_output_____
</code>
## Interactive Demo: fixed points as a function of recurrent and external inputs.
You can now explore how the previous plot changes when the recurrent coupling $w$ and the external input $I_{\text{ext}}$ take different values._____no_output_____
<code>
# @title
# @markdown Make sure you execute this cell to enable the widget!
def plot_intersection_single(w, I_ext):
# set your parameters
pars['w'] = w
pars['I_ext'] = I_ext
# note that wEE!=0
if w > 0:
# find fixed point
x_fp_1 = my_fp_single(pars, 0.)
x_fp_2 = my_fp_single(pars, 0.4)
x_fp_3 = my_fp_single(pars, 0.9)
plt.figure()
r = np.linspace(0, 1., 1000)
drdt = (-r + F(w * r + I_ext, pars['a'], pars['theta'])) / pars['tau']
plt.plot(r, drdt, 'k')
plt.plot(r, 0. * r, 'k--')
if check_fp_single(pars, x_fp_1):
plt.plot(x_fp_1, 0, 'bo', ms=8)
if check_fp_single(pars, x_fp_2):
plt.plot(x_fp_2, 0, 'ro', ms=8)
if check_fp_single(pars, x_fp_3):
plt.plot(x_fp_3, 0, 'yo', ms=8)
plt.xlabel(r'$r$', fontsize=14.)
plt.ylabel(r'$\frac{dr}{dt}$', fontsize=20.)
plt.show()
_ = widgets.interact(plot_intersection_single, w=(1, 7, 0.2),
I_ext=(0, 3, 0.1))_____no_output_____
</code>
---
# Summary
In this tutorial, we have investigated the dynamics of a rate-based single population of neurons.
We learned about:
- The effect of the input parameters and the time constant of the network on the dynamics of the population.
- How to find the fixed point(s) of the system.
Next, we have two Bonus, but important concepts in dynamical system analysis and simulation. If you have time left, watch the next video and proceed to solve the exercises. You will learn:
- How to determine the stability of a fixed point by linearizing the system.
- How to add realistic inputs to our model._____no_output_____---
# Bonus 1: Stability of a fixed point_____no_output_____
<code>
# @title Video 3: Stability of fixed points
from IPython.display import YouTubeVideo
video = YouTubeVideo(id="KKMlWWU83Jg", width=854, height=480, fs=1)
print("Video available at https://youtube.com/watch?v=" + video.id)
videoVideo available at https://youtube.com/watch?v=KKMlWWU83Jg
</code>
#### Initial values and trajectories
Here, let us first set $w=5.0$ and $I_{\text{ext}}=0.5$, and investigate the dynamics of $r(t)$ starting with different initial values $r(0) \equiv r_{\text{init}}$. We will plot the trajectories of $r(t)$ with $r_{\text{init}} = 0.0, 0.1, 0.2,..., 0.9$._____no_output_____
<code>
# @title Initial values
# @markdown Make sure you execute this cell to see the trajectories!
pars = default_pars_single()
pars['w'] = 5.0
pars['I_ext'] = 0.5
plt.figure(figsize=(8, 5))
for ie in range(10):
pars['r_init'] = 0.1 * ie # set the initial value
r = simulate_single(pars) # run the simulation
# plot the activity with given initial
plt.plot(pars['range_t'], r, 'b', alpha=0.1 + 0.1 * ie,
label=r'r$_{\mathrm{init}}$=%.1f' % (0.1 * ie))
plt.xlabel('t (ms)')
plt.title('Two steady states?')
plt.ylabel(r'$r$(t)')
plt.legend(loc=[1.01, -0.06], fontsize=14)
plt.show()_____no_output_____
</code>
## Interactive Demo: dynamics as a function of the initial value
Let's now set $r_{\rm init}$ to a value of your choice in this demo. How does the solution change? What do you observe?_____no_output_____
<code>
# @title
# @markdown Make sure you execute this cell to enable the widget!
pars = default_pars_single()
pars['w'] = 5.0
pars['I_ext'] = 0.5
def plot_single_diffEinit(r_init):
pars['r_init'] = r_init
r = simulate_single(pars)
plt.figure()
plt.plot(pars['range_t'], r, 'b', zorder=1)
plt.plot(0, r[0], 'bo', alpha=0.7, zorder=2)
plt.xlabel('t (ms)', fontsize=16)
plt.ylabel(r'$r(t)$', fontsize=16)
plt.ylim(0, 1.0)
plt.show()
_ = widgets.interact(plot_single_diffEinit, r_init=(0, 1, 0.02))_____no_output_____
</code>
### Stability analysis via linearization of the dynamics
Just like Equation $1$ in the case ($w=0$) discussed above, a generic linear system
$$\frac{dx}{dt} = \lambda (x - b),$$
has a fixed point for $x=b$. The analytical solution of such a system can be found to be:
$$x(t) = b + \big{(} x(0) - b \big{)} \text{e}^{\lambda t}.$$
Now consider a small perturbation of the activity around the fixed point: $x(0) = b+ \epsilon$, where $|\epsilon| \ll 1$. Will the perturbation $\epsilon(t)$ grow with time or will it decay to the fixed point? The evolution of the perturbation with time can be written, using the analytical solution for $x(t)$, as:
$$\epsilon (t) = x(t) - b = \epsilon \text{e}^{\lambda t}$$
- if $\lambda < 0$, $\epsilon(t)$ decays to zero, $x(t)$ will still converge to $b$ and the fixed point is "**stable**".
- if $\lambda > 0$, $\epsilon(t)$ grows with time, $x(t)$ will leave the fixed point $b$ exponentially, and the fixed point is, therefore, "**unstable**" ._____no_output_____### Compute the stability of Equation $1$
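Before doing this analytically for our model, here is a quick numerical sanity check (a minimal sketch using the functions defined earlier in this notebook): nudge the activity slightly away from each candidate fixed point and simulate; stable fixed points pull the trajectory back, while unstable ones let it drift away.
```python
# Sketch: numerically probe the stability of each fixed point of Equation (1)
pars = default_pars_single(T=50.)
pars['w'] = 5.0
pars['I_ext'] = 0.5
eps = 0.01  # small perturbation
for r_guess in [0., 0.4, 0.9]:
    r_fp = my_fp_single(pars, r_guess).item()
    pars['r_init'] = r_fp + eps          # start slightly above the fixed point
    r = simulate_single(pars)
    drift = np.abs(r[-1] - r_fp)         # how far we end up from the fixed point
    print(f'fixed point {r_fp:.3f}: |r(T) - r_fp| = {drift:.4f} '
          f'({"stable" if drift < eps else "unstable"})')
```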
Similar to what we did in the linear system above, in order to determine the stability of a fixed point $r^{*}$ of the excitatory population dynamics, we perturb Equation (1) around $r^{*}$ by $\epsilon$, i.e. $r = r^{*} + \epsilon$. We can plug in Equation (1) and obtain the equation determining the time evolution of the perturbation $\epsilon(t)$:
\begin{align}
\tau \frac{d\epsilon}{dt} \approx -\epsilon + w F'(w\cdot r^{*} + I_{\text{ext}};a,\theta) \epsilon
\end{align}
where $F'(\cdot)$ is the derivative of the transfer function $F(\cdot)$. We can rewrite the above equation as:
\begin{align}
\frac{d\epsilon}{dt} \approx \frac{\epsilon}{\tau }[-1 + w F'(w\cdot r^* + I_{\text{ext}};a,\theta)]
\end{align}
That is, as in the linear system above, the value of
$$\lambda = [-1+ wF'(w\cdot r^* + I_{\text{ext}};a,\theta)]/\tau \qquad (4)$$
determines whether the perturbation will grow or decay to zero, i.e., $\lambda$ defines the stability of the fixed point. This value is called the **eigenvalue** of the dynamical system._____no_output_____## Exercise 3: Compute $dF$ and Eigenvalue
The derivative of the sigmoid transfer function is:
\begin{align}
\frac{dF}{dx} & = \frac{d}{dx} (1+\exp\{-a(x-\theta)\})^{-1} \\
& = a\exp\{-a(x-\theta)\} (1+\exp\{-a(x-\theta)\})^{-2}. \qquad (5)
\end{align}
Let's now find the expression for the derivative $\displaystyle{\frac{dF}{dx}}$ in the following cell and plot it._____no_output_____
<code>
def dF(x, a, theta):
"""
Population activation function.
Args:
x : the population input
a : the gain of the function
theta : the threshold of the function
Returns:
dFdx : the derivative of the population activation function F(x) at input x
"""
#################################################
# TODO for students: compute dFdx ##
raise NotImplementedError("Student excercise: compute the deravitive of F")
#################################################
# Calculate the population activation
dFdx = ...
return dFdx
pars = default_pars_single() # get default parameters
x = np.arange(0, 10, .1) # set the range of input
# Uncomment below to test your function
# df = dF(x, pars['a'], pars['theta'])
# plot_dFdt(x, df)_____no_output_____# to_remove solution
def dF(x, a, theta):
"""
Population activation function.
Args:
x : the population input
a : the gain of the function
theta : the threshold of the function
Returns:
dFdx : the derivative of the population activation function F(x) at input x
"""
# Calculate the population activation
dFdx = a * np.exp(-a * (x - theta)) * (1 + np.exp(-a * (x - theta)))**-2
return dFdx
pars = default_pars_single() # get default parameters
x = np.arange(0, 10, .1) # set the range of input
df = dF(x, pars['a'], pars['theta'])
with plt.xkcd():
plot_dFdt(x, df)_____no_output_____
</code>
## Exercise 4: Compute eigenvalues
As discussed above, for the case with $w=5.0$ and $I_{\text{ext}}=0.5$, the system displays **three** fixed points. However, when we simulated the dynamics and varied the initial conditions $r_{\rm init}$, we could only obtain **two** steady states. In this exercise, we will now check the stability of each of the three fixed points by calculating the corresponding eigenvalues with the function `eig_single`. Check the sign of each eigenvalue (i.e., stability of each fixed point). How many of the fixed points are stable?
Note that the expression of the eigenvalue at fixed point $r^*$
$$\lambda = [-1+ wF'(w\cdot r^* + I_{\text{ext}};a,\theta)]/\tau$$_____no_output_____
<code>
def eig_single(pars, fp):
"""
Args:
pars : Parameter dictionary
fp : fixed point r_fp
Returns:
eig : eigenvalue of the linearized system
"""
# get the parameters
tau, a, theta = pars['tau'], pars['a'], pars['theta']
w, I_ext = pars['w'], pars['I_ext']
print(tau, a, theta, w, I_ext)
#################################################
## TODO for students: compute eigenvalue ##
raise NotImplementedError("Student excercise: compute the eigenvalue")
#################################################
# Compute the eigenvalue
eig = ...
return eig
pars = default_pars_single()
pars['w'] = 5.0
pars['I_ext'] = 0.5
# Uncomment below lines after completing the eig_single function.
# Find the eigenvalues for all fixed points of Exercise 2
# x_fp_1 = my_fp_single(pars, 0.).item()
# eig_fp_1 = eig_single(pars, x_fp_1).item()
# print(f'Fixed point1 at {x_fp_1:.3f} with Eigenvalue={eig_fp_1:.3f}')
# x_fp_2 = my_fp_single(pars, 0.4).item()
# eig_fp_2 = eig_single(pars, x_fp_2).item()
# print(f'Fixed point2 at {x_fp_2:.3f} with Eigenvalue={eig_fp_2:.3f}')
# x_fp_3 = my_fp_single(pars, 0.9).item()
# eig_fp_3 = eig_single(pars, x_fp_3).item()
# print(f'Fixed point3 at {x_fp_3:.3f} with Eigenvalue={eig_fp_3:.3f}')_____no_output_____
</code>
**SAMPLE OUTPUT**
```
Fixed point1 at 0.042 with Eigenvalue=-0.583
Fixed point2 at 0.447 with Eigenvalue=0.498
Fixed point3 at 0.900 with Eigenvalue=-0.626
```_____no_output_____
<code>
# to_remove solution
def eig_single(pars, fp):
"""
Args:
pars : Parameter dictionary
fp : fixed point r_fp
Returns:
eig : eigenvalue of the linearized system
"""
# get the parameters
tau, a, theta = pars['tau'], pars['a'], pars['theta']
w, I_ext = pars['w'], pars['I_ext']
print(tau, a, theta, w, I_ext)
# Compute the eigenvalue
eig = (-1. + w * dF(w * fp + I_ext, a, theta)) / tau
return eig
pars = default_pars_single()
pars['w'] = 5.0
pars['I_ext'] = 0.5
# Find the eigenvalues for all fixed points of Exercise 2
x_fp_1 = my_fp_single(pars, 0.).item()
eig_fp_1 = eig_single(pars, x_fp_1).item()
print(f'Fixed point1 at {x_fp_1:.3f} with Eigenvalue={eig_fp_1:.3f}')
x_fp_2 = my_fp_single(pars, 0.4).item()
eig_fp_2 = eig_single(pars, x_fp_2).item()
print(f'Fixed point2 at {x_fp_2:.3f} with Eigenvalue={eig_fp_2:.3f}')
x_fp_3 = my_fp_single(pars, 0.9).item()
eig_fp_3 = eig_single(pars, x_fp_3).item()
print(f'Fixed point3 at {x_fp_3:.3f} with Eigenvalue={eig_fp_3:.3f}')1.0 1.2 2.8 5.0 0.5
Fixed point1 at 0.042 with Eigenvalue=-0.583
1.0 1.2 2.8 5.0 0.5
Fixed point2 at 0.447 with Eigenvalue=0.498
1.0 1.2 2.8 5.0 0.5
Fixed point3 at 0.900 with Eigenvalue=-0.626
</code>
## Think!
Throughout the tutorial, we have assumed $w> 0 $, i.e., we considered a single population of **excitatory** neurons. What do you think will be the behavior of a population of inhibitory neurons, i.e., where $w> 0$ is replaced by $w< 0$? _____no_output_____---
# Bonus 2: Noisy input drives the transition between two stable states
_____no_output_____## Ornstein-Uhlenbeck (OU) process
As discussed in several previous tutorials, the OU process is usually used to generate a noisy input into the neuron. The OU input $\eta(t)$ follows:
$$\tau_\eta \frac{d}{dt}\eta(t) = -\eta (t) + \sigma_\eta\sqrt{2\tau_\eta}\xi(t)$$
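For reference, `my_OU` below integrates this with an Euler–Maruyama step (a sketch of the discretization; $\xi_k$ are independent standard normal samples and $\Delta t$ is the simulation time step):
$$\eta[k+1] = \eta[k] - \frac{\Delta t}{\tau_\eta}\,\eta[k] + \sigma_\eta\sqrt{\frac{2\,\Delta t}{\tau_\eta}}\;\xi_{k+1}$$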
Execute the following function `my_OU(pars, sig, myseed=False)` to generate an OU process._____no_output_____
<code>
# @title OU process `my_OU(pars, sig, myseed=False)`
# @markdown Make sure you execute this cell to visualize the noise!
def my_OU(pars, sig, myseed=False):
"""
A function that generates an Ornstein-Uhlenbeck process
Args:
pars : parameter dictionary
sig : noise amplitude
myseed : random seed. int or boolean
Returns:
I : Ornstein-Uhlenbeck input current
"""
# Retrieve simulation parameters
dt, range_t = pars['dt'], pars['range_t']
Lt = range_t.size
tau_ou = pars['tau_ou'] # [ms]
# set random seed
if myseed:
np.random.seed(seed=myseed)
else:
np.random.seed()
# Initialize
noise = np.random.randn(Lt)
I_ou = np.zeros(Lt)
I_ou[0] = noise[0] * sig
# generate OU
for it in range(Lt - 1):
I_ou[it + 1] = (I_ou[it]
+ dt / tau_ou * (0. - I_ou[it])
+ np.sqrt(2 * dt / tau_ou) * sig * noise[it + 1])
return I_ou
pars = default_pars_single(T=100)
pars['tau_ou'] = 1. # [ms]
sig_ou = 0.1
I_ou = my_OU(pars, sig=sig_ou, myseed=2020)
plt.figure(figsize=(10, 4))
plt.plot(pars['range_t'], I_ou, 'r')
plt.xlabel('t (ms)')
plt.ylabel(r'$I_{\mathrm{OU}}$')
plt.show()_____no_output_____
</code>
## Example: Up-Down transition
In the presence of two or more fixed points, noisy inputs can drive a transition between the fixed points! Here, we stimulate an E population for 1,000 ms applying OU inputs._____no_output_____
<code>
# @title Simulation of an E population with OU inputs
# @markdown Make sure you execute this cell to spot the Up-Down states!
pars = default_pars_single(T=1000)
pars['w'] = 5.0
sig_ou = 0.7
pars['tau_ou'] = 1. # [ms]
pars['I_ext'] = 0.56 + my_OU(pars, sig=sig_ou, myseed=2020)
r = simulate_single(pars)
plt.figure(figsize=(10, 4))
plt.plot(pars['range_t'], r, 'b', alpha=0.8)
plt.xlabel('t (ms)')
plt.ylabel(r'$r(t)$')
plt.show()_____no_output_____
</code>
| {
"repository": "NinelK/course-content",
"path": "tutorials/W3D2_DynamicNetworks/W3D2_Tutorial1.ipynb",
"matched_keywords": [
"evolution"
],
"stars": null,
"size": 861372,
"hexsha": "d0e159ab0214356424fe7b46700d79e5e90fda3b",
"max_line_length": 111608,
"avg_line_length": 268.9266312832,
"alphanum_fraction": 0.9158017674
} |
# Notebook from esandford/forecaster
Path: .ipynb_checkpoints/demo-checkpoint.ipynb
WARNING:
"fitting_parameters.h5" need to be in the directory you are working on
or there will be an error for importing mr_forecast in the next cell.
If you don't want the file in this directory,
change the mr_forecast.py line 16
hyper_file = 'fitting_parameters.h5'
->
hyper_file = [directory of fitting parameter file]+'fitting_parameters.h5'_____no_output_____
<code>
import numpy as np
import mr_forecast as mr
import matplotlib.pyplot as plt
%matplotlib inline_____no_output_____
</code>
================================
predict the mean and std of radius given those of the mass_____no_output_____
<code>
Rmedian, Rplus, Rminus = mr.Mstat2R(mean=1.0, std=0.1, unit='Earth', sample_size=100, classify='Yes')Terran 97.0 %, Neptunian 3.0 %, Jovian 0.0 %, Star 0.0 %
print('R = %.2f (+ %.2f - %.2f) REarth' % (Rmedian, Rplus, Rminus))R = 1.00 (+ 0.12 - 0.10) REarth
</code>
================================
predict a vector of radius given a vector of mass_____no_output_____
<code>
M1 = np.loadtxt('demo_mass.dat')
R1 = mr.Mpost2R(M1, unit='Earth', classify='Yes')Terran 100.0 %, Neptunian 0.0 %, Jovian 0.0 %, Star 0.0 %
plt.plot(np.log10(M1), np.log10(R1), 'bx')
plt.xlabel(r'$log_{10}\ M/M_{\oplus}$')
plt.ylabel(r'$log_{10}\ R/R_{\oplus}$')
plt.show()_____no_output_____
</code>
================================
predict the mean and std of mass given those of the radius_____no_output_____
<code>
Mmedian, Mplus, Mminus = mr.Rstat2M(mean=0.1, std=0.01, unit='Jupiter', sample_size=100, grid_size=1e3, classify='Yes')Terran 69.0 %, Neptunian 31.0 %, Jovian 0.0 %, Star 0.0 %
print('M = %.3f (+ %.3f - %.3f) MEarth' % (Mmedian, Mplus, Mminus))M = 0.005 (+ 0.004 - 0.002) MEarth
</code>
================================
predict a vector of mass given a vector of radius_____no_output_____
<code>
R2 = np.loadtxt('demo_radius.dat')
M2 = mr.Rpost2M(R2, unit='Earth', grid_size=1e3, classify='Yes')Terran 72.0 %, Neptunian 28.0 %, Jovian 0.0 %, Star 0.0 %
plt.hist(np.log10(M2))
plt.xlabel(r'$log_{10}\ M/M_{\odot}$')
plt.show()_____no_output_____
</code>
| {
"repository": "esandford/forecaster",
"path": ".ipynb_checkpoints/demo-checkpoint.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 16,
"size": 26143,
"hexsha": "d0e1d8b688b6caf3b5ce43c52c2c8649e9c416b3",
"max_line_length": 14334,
"avg_line_length": 97.1858736059,
"alphanum_fraction": 0.8569024213
} |
# Notebook from brouhardlab/kappa
Path: Analysis/Notebooks/Spiral Dataset/2_Measure_Curvature.ipynb
**Important**: This notebook is different from the others as it directly calls the **ImageJ Kappa plugin** using the [`scyjava` ImageJ bridge](https://github.com/scijava/scyjava).
Since Kappa uses ImageJ1 features, you might not be able to run this notebook on a headless machine (this needs to be tested)._____no_output_____
<code>
from pathlib import Path
import pandas as pd
import numpy as np
from tqdm.auto import tqdm
import sys; sys.path.append("../../")
import pykappa
# Init ImageJ with Fiji plugins
# It can take a while if Java artifacts are not yet cached.
import imagej
java_deps = []
java_deps.append('org.scijava:Kappa:1.7.1')
ij = imagej.init("+".join(java_deps), headless=False)
import jnius
# Load Java classes
KappaFrame = jnius.autoclass('sc.fiji.kappa.gui.KappaFrame')
CurvesExporter = jnius.autoclass('sc.fiji.kappa.gui.CurvesExporter')
# Load ImageJ services
dsio = ij.context.getService(jnius.autoclass('io.scif.services.DatasetIOService'))
dsio = jnius.cast('io.scif.services.DatasetIOService', dsio)
# Set data path
data_dir = Path("/home/hadim/.data/Postdoc/Kappa/spiral_curve_SDM/")
# Pixel size used when fixed
fixed_pixel_size = 0.16
# Used to select pixels around the initialization curves
base_radius_um = 1.6
enable_control_points_adjustment = True
# "Point Distance Minimization" or "Squared Distance Minimization"
if '_SDM' in data_dir.name:
fitting_algorithm = "Squared Distance Minimization"
else:
fitting_algorithm = "Point Distance Minimization"
fitting_algorithm_____no_output_____experiment_names = ['variable_snr', 'variable_initial_position', 'variable_pixel_size', 'variable_psf_size']
experiment_names = ['variable_psf_size']
for experiment_name in tqdm(experiment_names, total=len(experiment_names)):
experiment_path = data_dir / experiment_name
fnames = sorted(list(experiment_path.glob("*.tif")))
n = len(fnames)
for fname in tqdm(fnames, total=n, leave=False):
tqdm.write(str(fname))
kappa_path = fname.with_suffix(".kapp")
assert kappa_path.exists(), f'{kappa_path} does not exist.'
curvatures_path = fname.with_suffix(".csv")
if not curvatures_path.is_file():
frame = KappaFrame(ij.context)
frame.getKappaMenubar().openImageFile(str(fname))
frame.resetCurves()
frame.getKappaMenubar().loadCurveFile(str(kappa_path))
frame.getCurves().setAllSelected()
# Compute threshold according to the image
dataset = dsio.open(str(fname))
mean = ij.op().stats().mean(dataset).getRealDouble()
std = ij.op().stats().stdDev(dataset).getRealDouble()
threshold = int(mean + std * 2)
# Used fixed pixel size or the one in the filename
if fname.stem.startswith('pixel_size'):
pixel_size = float(fname.stem.split("_")[-2])
if experiment_name == 'variable_psf_size':
pixel_size = 0.01
else:
pixel_size = fixed_pixel_size
base_radius = int(np.round(base_radius_um / pixel_size))
# Set curve fitting parameters
frame.setEnableCtrlPtAdjustment(enable_control_points_adjustment)
frame.setFittingAlgorithm(fitting_algorithm)
frame.getInfoPanel().thresholdRadiusSpinner.setValue(ij.py.to_java(base_radius))
frame.getInfoPanel().thresholdSlider.setValue(threshold)
frame.getInfoPanel().updateConversionField(str(pixel_size))
# Fit the curves
frame.fitCurves()
# Save fitted curves
frame.getKappaMenubar().saveCurveFile(str(fname.with_suffix(".FITTED.kapp")))
# Export results
exporter = CurvesExporter(frame)
exporter.exportToFile(str(curvatures_path), False)
# Remove duplicate rows during CSV export.
# See https://github.com/brouhardlab/Kappa/issues/12
df = pd.read_csv(curvatures_path)
df = df.drop_duplicates()
df.to_csv(curvatures_path)_____no_output_____0.13**2_____no_output_____
</code>
| {
"repository": "brouhardlab/kappa",
"path": "Analysis/Notebooks/Spiral Dataset/2_Measure_Curvature.ipynb",
"matched_keywords": [
"ImageJ"
],
"stars": 7,
"size": 7892,
"hexsha": "d0e2a0c6f4a3fa82489003c4d0edb2e0fe262af9",
"max_line_length": 182,
"avg_line_length": 34.3130434783,
"alphanum_fraction": 0.5622149012
} |
# Notebook from hoangkimkhai276/recsys
Path: GRS.ipynb
<code>
import pandas as pd
import random_____no_output_____
</code>
### Read the data_____no_output_____
<code>
movies_df = pd.read_csv('mymovies.csv')
ratings_df = pd.read_csv('myratings.csv')_____no_output_____
</code>
### Select the data
The recommender system should avoid bias: for example, it should not recommend a movie that has only a single 5-star rating over movies that have accumulated many ratings.
Therefore, we only take into account movies with at least 200 ratings and users who have rated at least 50 movies._____no_output_____
<code>
user_threshold = 50
movie_threshold = 200
filtered_users = ratings_df['user'].value_counts()>=user_threshold
filtered_users = filtered_users[filtered_users].index.tolist()
filtered_movies = ratings_df['item'].value_counts()>=movie_threshold
filtered_movies = filtered_movies[filtered_movies].index.tolist()
filtered_df = ratings_df[(ratings_df['user'].isin(filtered_users)) & (ratings_df['item'].isin(filtered_movies))]_____no_output_____display(filtered_df)_____no_output_____
</code>
### Select a group of n random users
Here we let n = 5: we select 5 random users from the filtered dataset._____no_output_____
<code>
#Select a random group of user
user_ids = filtered_df['user'].unique()
group_users_ids = random.sample(list(user_ids), 5)_____no_output_____group_users_ids_____no_output_____
</code>
### Select rated and unrated movies for the given group
We can now get the movies rated by the users in the group, and from that, we can also get the unrated movies for the whole group of 5._____no_output_____
<code>
selected_group_rating = ratings_df.loc[ratings_df['user'].isin(group_users_ids)]
group_rated_movies_ids = selected_group_rating['item'].unique()
group_unrated_movies_ids = set(movies_df['item']) - set(group_rated_movies_ids)
group_rated_movies_df = movies_df.loc[movies_df['item'].isin(group_rated_movies_ids)]
group_unrated_movies_df = movies_df.loc[movies_df['item'].isin(group_unrated_movies_ids)]_____no_output_____group_rated_movies_df_____no_output_____group_unrated_movies_df_____no_output_____
</code>
### Calculate expected ratings for unrated movies
For each user, we need to calculate the expected ratings for the movies the group has not rated. To predict these ratings, we first need to train
a model; here, the SVD algorithm from the Surprise library is used.
_____no_output_____
<code>
from surprise import Reader, Dataset, SVD
from surprise.model_selection.validation import cross_validate_____no_output_____
</code>
We perform 5-fold cross-validation on the whole ratings dataset to see how well SVD performs_____no_output_____
<code>
reader = Reader()
data = Dataset.load_from_df(ratings_df[['user', 'item', 'rating']], reader)
svd = SVD()
cross_validate(svd, data, measures=['RMSE', 'MAE'], cv=5, verbose=True)Evaluating RMSE, MAE of algorithm SVD on 5 split(s).
Fold 1 Fold 2 Fold 3 Fold 4 Fold 5 Mean Std
RMSE (testset) 0.8776 0.8791 0.8701 0.8678 0.8708 0.8731 0.0044
MAE (testset) 0.6764 0.6764 0.6677 0.6680 0.6679 0.6713 0.0042
Fit time 9.63 9.58 9.44 9.52 9.58 9.55 0.06
Test time 0.17 0.16 0.22 0.23 0.16 0.19 0.03
</code>
Next, we train the SVD model on the full ratings dataset._____no_output_____
<code>
trainset = data.build_full_trainset()
svd = svd.fit(trainset)_____no_output_____def predict(user):
unrated_movies = list(group_unrated_movies_df['item'].unique())
pred = pd.DataFrame()
i = 0
for item in unrated_movies:
pred = pred.append({'user':user,'item': item, 'predicted_rating':svd.predict(user, item)[3]}, ignore_index=True)
return pred
_____no_output_____users_rating = []
for user in group_users_ids:
prediction = predict(user)
prediction = prediction.sort_values('predicted_rating')
prediction = prediction.merge(movies_df, on= 'item')
users_rating.append(prediction[['user','item','title','predicted_rating']])_____no_output_____
</code>
The code above iterates through the 5 users and, for each user, calculates the predicted rating for every unrated movie. It then combines the predicted ratings of the 5 users into one big dataframe, on which the aggregation strategies below can be computed._____no_output_____
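To make those aggregation strategies concrete, here is a toy illustration with made-up numbers (not taken from the data above): the additive strategy sums the predicted ratings over group members, least misery takes the group minimum, and most pleasure takes the group maximum.
```python
# Toy illustration of the three aggregation strategies (hypothetical ratings)
toy = pd.DataFrame({
    'user': [1, 2, 3, 1, 2, 3],
    'item': ['A', 'A', 'A', 'B', 'B', 'B'],
    'predicted_rating': [5.0, 2.0, 4.0, 3.5, 3.5, 3.5],
})
print(toy.groupby('item')['predicted_rating'].sum())  # additive:      A=11.0, B=10.5
print(toy.groupby('item')['predicted_rating'].min())  # least misery:  A=2.0,  B=3.5
print(toy.groupby('item')['predicted_rating'].max())  # most pleasure: A=5.0,  B=3.5
```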
<code>
final = pd.concat([df for df in users_rating], ignore_index = True)_____no_output_____final_____no_output_____
</code>
### Additive Strategy_____no_output_____
<code>
additive = final.copy()
additive= additive.groupby(['item','title']).sum()
additive = additive.sort_values(by="predicted_rating", ascending=False).reset_index()
additive_____no_output_____
</code>
### Most Pleasure Strategy_____no_output_____
<code>
most_pleasure = final.copy()_____no_output_____most_pleasure = final.copy()
most_pleasure= most_pleasure.groupby(['item','title']).max()
most_pleasure = most_pleasure.sort_values(by="predicted_rating", ascending=False).reset_index()
most_pleasure_____no_output_____
</code>
### Least Misery Strategy_____no_output_____
<code>
least_misery = final.copy()
least_misery = final.copy()
least_misery= least_misery.groupby(['item','title']).min()
least_misery = least_misery.sort_values(by="predicted_rating", ascending=False).reset_index()
least_misery_____no_output_____def fairness():
titles = []
for uid in group_users_ids:
data = final.loc[final['user'] == uid]
data = data.sort_values(by = 'predicted_rating', ascending = False).reset_index().iloc[0]['title']
titles.append([uid,data])
return titles_____no_output_____tt = fairness()
print(tt)[[387, '12 angry men'], [384, 'eternal sunshine of the spotless mind'], [460, 'amadeus'], [76, "guess who's coming to dinner"], [151, 'kiss kiss bang bang']]
def gen_rec_and_explain():
most_pleasure = final.copy()
most_pleasure= most_pleasure.groupby(['item','title']).max()
most_pleasure = most_pleasure.sort_values(by="predicted_rating", ascending=False).reset_index()
most_pleasure_movie = most_pleasure.iloc[0:5]['title']
least_misery = final.copy()
least_misery= least_misery.groupby(['item','title']).min()
least_misery = least_misery.sort_values(by="predicted_rating", ascending=False).reset_index()
least_misery_movie = least_misery.iloc[0:5]['title']
additive = final.copy()
additive= additive.groupby(['item','title']).sum()
additive = additive.sort_values(by="predicted_rating", ascending=False).reset_index()
additive_movie = additive.iloc[0:5]['title']
fairnesss = fairness()
print("#FAIR")
for uid, title in fairnesss:
print("The movie {} is the most favorite movie of user {}".format(title, uid))
print("#ADD: ")
print("The movies: {} was recommended to you because they have highest additive rating within your group".format(list(additive_movie)))
print("#LEAST: ")
print("The movies: {} was recommended to you because they are everyones' preferences ".format(list(least_misery_movie)))
print("#MOST: ")
print("The movies: {} was recommended to you because they are the most loved".format(list(most_pleasure_movie)))
_____no_output_____gen_rec_and_explain()#FAIR
The movie 12 angry men is the most favorite movie of user 387
The movie eternal sunshine of the spotless mind is the most favorite movie of user 384
The movie amadeus is the most favorite movie of user 460
The movie guess who's coming to dinner is the most favorite movie of user 76
The movie kiss kiss bang bang is the most favorite movie of user 151
#ADD:
The movies: ['eternal sunshine of the spotless mind', 'kiss kiss bang bang', "guess who's coming to dinner", 'ran', "schindler's list"] were recommended to you because they have the highest additive rating within your group
#LEAST:
The movies: ['eternal sunshine of the spotless mind', "guess who's coming to dinner", 'dead poets society', 'ran', "all the president's men"] were recommended to you because they match everyone's preferences
#MOST:
The movies: ['kiss kiss bang bang', 'amadeus', 'whiplash', "schindler's list", 'saving private ryan'] were recommended to you because they are the most loved
import itertools
from lenskit.algorithms import Recommender
from lenskit.algorithms.user_knn import UserUser
user_user = UserUser(15, min_nbrs=3) # Minimum (3) and maximum (15) number of neighbors to consider
recsys = Recommender.adapt(user_user)
recsys.fit(ratings_df)
group_unseen_df = pd.DataFrame(list(itertools.product(group_users_ids, group_unrated_movies_ids)), columns=['user', 'item'])
group_unseen_df['predicted_rating'] = recsys.predict(group_unseen_df)
group_unseen_df = group_unseen_df.loc[group_unseen_df['predicted_rating'].notnull()]
display(group_unseen_df)_____no_output_____group_unseen_df_____no_output_____group_unseen_df.groupby('item').sum()_____no_output_____additive_df = group_unseen_df.groupby('item').sum()
additive_df = additive_df.join(movies_df['title'], on='item')
additive_df = additive_df.sort_values(by="predicted_rating", ascending=False).reset_index()[['item', 'title', 'predicted_rating']]
display(additive_df.head(10))_____no_output_____additive_df = group_unseen_df.groupby('item').sum()_____no_output_____additive_df_____no_output_____movies_df.loc[movies_df['item'] == 177593]_____no_output_____
</code>
| {
"repository": "hoangkimkhai276/recsys",
"path": "GRS.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 1,
"size": 53903,
"hexsha": "d0e91645b9718de99b4f6137f57126c29356d08e",
"max_line_length": 245,
"avg_line_length": 32.8076688984,
"alphanum_fraction": 0.4055432907
} |
# Notebook from abostroem/AstronomicalData
Path: 07_plot.ipynb
# Chapter 7
This is the seventh in a series of notebooks related to astronomy data.
As a continuing example, we will replicate part of the analysis in a recent paper, "[Off the beaten path: Gaia reveals GD-1 stars outside of the main stream](https://arxiv.org/abs/1805.00425)" by Adrian M. Price-Whelan and Ana Bonaca.
In the previous notebook we selected photometry data from Pan-STARRS and used it to identify stars we think are likely to be in GD-1.
In this notebook, we'll take the results from previous lessons and use them to make a figure that tells a compelling scientific story._____no_output_____## Outline
Here are the steps in this notebook:
1. Starting with the figure from the previous notebook, we'll add annotations to present the results more clearly.
2. Then we'll see several ways to customize figures to make them more appealing and effective.
3. Finally, we'll see how to make a figure with multiple panels or subplots.
After completing this lesson, you should be able to
* Design a figure that tells a compelling story.
* Use Matplotlib features to customize the appearance of figures.
* Generate a figure with multiple subplots._____no_output_____## Installing libraries
If you are running this notebook on Colab, you can run the following cell to install Astroquery and the other libraries we'll use.
If you are running this notebook on your own computer, you might have to install these libraries yourself. See the instructions in the preface._____no_output_____
<code>
# If we're running on Colab, install libraries
import sys
IN_COLAB = 'google.colab' in sys.modules
if IN_COLAB:
!pip install astroquery astro-gala pyia python-wget_____no_output_____
</code>
## Making Figures That Tell a Story
So far the figures we've made have been "quick and dirty". Mostly we have used Matplotlib's default style, although we have adjusted a few parameters, like `markersize` and `alpha`, to improve legibility.
Now that the analysis is done, it's time to think more about:
1. Making professional-looking figures that are ready for publication, and
2. Making figures that communicate a scientific result clearly and compellingly.
Not necessarily in that order._____no_output_____Let's start by reviewing Figure 1 from the original paper. We've seen the individual panels, but now let's look at the whole thing, along with the caption:
<img width="500" src="https://github.com/datacarpentry/astronomy-python/raw/gh-pages/fig/gd1-5.png">_____no_output_____**Exercise:** Think about the following questions:
1. What is the primary scientific result of this work?
2. What story is this figure telling?
3. In the design of this figure, can you identify 1-2 choices the authors made that you think are effective? Think about big-picture elements, like the number of panels and how they are arranged, as well as details like the choice of typeface.
4. Can you identify 1-2 elements that could be improved, or that you might have done differently?_____no_output_____Some topics that might come up in this discussion:
1. The primary result is that the multiple stages of selection make it possible to separate likely candidates from the background more effectively than in previous work, which makes it possible to see the structure of GD-1 in "unprecedented detail".
2. The figure documents the selection process as a sequence of steps. Reading right-to-left, top-to-bottom, we see selection based on proper motion, the results of the first selection, selection based on color and magnitude, and the results of the second selection. So this figure documents the methodology and presents the primary result.
3. It's mostly black and white, with minimal use of color, so it will work well in print. The annotations in the bottom left panel guide the reader to the most important results. It contains enough technical detail for a professional audience, but most of it is also comprehensible to a more general audience. The two left panels have the same dimensions and their axes are aligned.
4. Since the panels represent a sequence, it might be better to arrange them left-to-right. The placement and size of the axis labels could be tweaked. The entire figure could be a little bigger to match the width and proportion of the caption. The top left panel has unused white space (but that leaves space for the annotations in the bottom left)._____no_output_____## Plotting GD-1
Let's start with the panel in the lower left. The following cell reloads the data._____no_output_____
<code>
import os
from wget import download
filename = 'gd1_merged.hdf5'
path = 'https://github.com/AllenDowney/AstronomicalData/raw/main/data/'
if not os.path.exists(filename):
print(download(path+filename))_____no_output_____import pandas as pd
selected = pd.read_hdf(filename, 'selected')_____no_output_____import matplotlib.pyplot as plt
def plot_second_selection(df):
x = df['phi1']
y = df['phi2']
plt.plot(x, y, 'ko', markersize=0.7, alpha=0.9)
plt.xlabel('$\phi_1$ [deg]')
plt.ylabel('$\phi_2$ [deg]')
plt.title('Proper motion + photometry selection', fontsize='medium')
plt.axis('equal')_____no_output_____
</code>
And here's what it looks like._____no_output_____
<code>
plt.figure(figsize=(10,2.5))
plot_second_selection(selected)_____no_output_____
</code>
## Annotations
The figure in the paper uses three other features to present the results more clearly and compellingly:
* A vertical dashed line to distinguish the previously undetected region of GD-1,
* A label that identifies the new region, and
* Several annotations that combine text and arrows to identify features of GD-1.
As an exercise, choose any or all of these features and add them to the figure:
* To draw vertical lines, see [`plt.vlines`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.vlines.html) and [`plt.axvline`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.axvline.html#matplotlib.pyplot.axvline).
* To add text, see [`plt.text`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.text.html).
* To add an annotation with text and an arrow, see [`plt.annotate`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.annotate.html).
And here is some [additional information about text and arrows](https://matplotlib.org/3.3.1/tutorials/text/annotations.html#plotting-guide-annotation)._____no_output_____
<code>
# Solution
# plt.axvline(-55, ls='--', color='gray',
# alpha=0.4, dashes=(6,4), lw=2)
# plt.text(-60, 5.5, 'Previously\nundetected',
# fontsize='small', ha='right', va='top');
# arrowprops=dict(color='gray', shrink=0.05, width=1.5,
# headwidth=6, headlength=8, alpha=0.4)
# plt.annotate('Spur', xy=(-33, 2), xytext=(-35, 5.5),
# arrowprops=arrowprops,
# fontsize='small')
# plt.annotate('Gap', xy=(-22, -1), xytext=(-25, -5.5),
# arrowprops=arrowprops,
# fontsize='small')_____no_output_____
</code>
## Customization
Matplotlib provides a default style that determines things like the colors of lines, the placement of labels and ticks on the axes, and many other properties.
There are several ways to override these defaults and customize your figures:
* To customize only the current figure, you can call functions like `tick_params`, which we'll demonstrate below.
* To customize all figures in a notebook, you use `rcParams`.
* To override more than a few defaults at the same time, you can use a style sheet._____no_output_____As a simple example, notice that Matplotlib puts ticks on the outside of the figures by default, and only on the left and bottom sides of the axes.
To change this behavior, you can use `gca()` to get the current axes and `tick_params` to change the settings.
Here's how you can put the ticks on the inside of the figure:
```
plt.gca().tick_params(direction='in')
```
**Exercise:** Read the documentation of [`tick_params`](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.axes.Axes.tick_params.html) and use it to put ticks on the top and right sides of the axes._____no_output_____
<code>
# Solution
# plt.gca().tick_params(top=True, right=True)_____no_output_____
</code>
## rcParams
If you want to make a customization that applies to all figures in a notebook, you can use `rcParams`.
Here's an example that reads the current font size from `rcParams`:_____no_output_____
<code>
plt.rcParams['font.size']_____no_output_____
</code>
And sets it to a new value:_____no_output_____
<code>
plt.rcParams['font.size'] = 14_____no_output_____
</code>
**Exercise:** Plot the previous figure again, and see what font sizes have changed. Look up any other element of `rcParams`, change its value, and check the effect on the figure._____no_output_____If you find yourself making the same customizations in several notebooks, you can put changes to `rcParams` in a `matplotlibrc` file, [which you can read about here](https://matplotlib.org/3.3.1/tutorials/introductory/customizing.html#customizing-with-matplotlibrc-files)._____no_output_____## Style sheets
The `matplotlibrc` file is read when you import Matplotlib, so it is not easy to switch from one set of options to another.
The solution to this problem is style sheets, [which you can read about here](https://matplotlib.org/3.1.1/tutorials/introductory/customizing.html).
Matplotlib provides a set of predefined style sheets, or you can make your own.
The following cell displays a list of style sheets installed on your system._____no_output_____
<code>
plt.style.available_____no_output_____
</code>
Note that `seaborn-paper`, `seaborn-talk` and `seaborn-poster` are particularly intended to prepare versions of a figure with text sizes and other features that work well in papers, talks, and posters.
To use any of these style sheets, run `plt.style.use` like this:
```
plt.style.use('fivethirtyeight')
```_____no_output_____The style sheet you choose will affect the appearance of all figures you plot after calling `use`, unless you override any of the options or call `use` again.
**Exercise:** Choose one of the styles on the list and select it by calling `use`. Then go back and plot one of the figures above and see what effect it has._____no_output_____If you can't find a style sheet that's exactly what you want, you can make your own. This repository includes a style sheet called `az-paper-twocol.mplstyle`, with customizations chosen by Azalee Bostroem for publication in astronomy journals.
The following cell downloads the style sheet._____no_output_____
<code>
import os
filename = 'az-paper-twocol.mplstyle'
path = 'https://github.com/AllenDowney/AstronomicalData/raw/main/data/'
if not os.path.exists(filename):
print(download(path+filename))_____no_output_____
</code>
You can use it like this:
```
plt.style.use('./az-paper-twocol.mplstyle')
```
The prefix `./` tells Matplotlib to look for the file in the current directory._____no_output_____As an alternative, you can install a style sheet for your own use by putting it in your configuration directory. To find out where that is, you can run the following command:
```
import matplotlib as mpl
mpl.get_configdir()
```_____no_output_____## LaTeX fonts
When you include mathematical expressions in titles, labels, and annotations, Matplotlib uses [`mathtext`](https://matplotlib.org/3.1.0/tutorials/text/mathtext.html) to typeset them. `mathtext` uses the same syntax as LaTeX, but it provides only a subset of its features.
If you need features that are not provided by `mathtext`, or you prefer the way LaTeX typesets mathematical expressions, you can customize Matplotlib to use LaTeX.
In `matplotlibrc` or in a style sheet, you can add the following line:
```
text.usetex : true
```
Or in a notebook you can run the following code.
```
plt.rcParams['text.usetex'] = True
```_____no_output_____
<code>
plt.rcParams['text.usetex'] = True_____no_output_____
</code>
If you go back and draw the figure again, you should see the difference.
If you get an error message like
```
LaTeX Error: File `type1cm.sty' not found.
```
You might have to install a package that contains the fonts LaTeX needs. On some systems, the packages `texlive-latex-extra` or `cm-super` might be what you need. [See here for more help with this](https://stackoverflow.com/questions/11354149/python-unable-to-render-tex-in-matplotlib).
In case you are curious, `cm` stands for [Computer Modern](https://en.wikipedia.org/wiki/Computer_Modern), the font LaTeX uses to typeset math._____no_output_____## Multiple panels
So far we've been working with one figure at a time, but the figure we are replicating contains multiple panels, also known as "subplots".
Confusingly, Matplotlib provides *three* functions for making figures like this: `subplot`, `subplots`, and `subplot2grid`.
* [`subplot`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.subplot.html) is simple and similar to MATLAB, so if you are familiar with that interface, you might like `subplot`
* [`subplots`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.subplots.html) is more object-oriented, which some people prefer.
* [`subplot2grid`](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.subplot2grid.html) is most convenient if you want to control the relative sizes of the subplots.
So we'll use `subplot2grid`.
All of these functions are easier to use if we put the code that generates each panel in a function._____no_output_____## Upper right
To make the panel in the upper right, we have to reload `centerline`._____no_output_____
<code>
import os
filename = 'gd1_dataframe.hdf5'
path = 'https://github.com/AllenDowney/AstronomicalData/raw/main/data/'
if not os.path.exists(filename):
print(download(path+filename))_____no_output_____import pandas as pd
centerline = pd.read_hdf(filename, 'centerline')_____no_output_____
</code>
And define the coordinates of the rectangle we selected._____no_output_____
<code>
pm1_min = -8.9
pm1_max = -6.9
pm2_min = -2.2
pm2_max = 1.0
pm1_rect = [pm1_min, pm1_min, pm1_max, pm1_max]
pm2_rect = [pm2_min, pm2_max, pm2_max, pm2_min]_____no_output_____
</code>
To plot this rectangle, we'll use a feature we have not seen before: `Polygon`, which is provided by Matplotlib.
To create a `Polygon`, we have to put the coordinates in an array with `x` values in the first column and `y` values in the second column. _____no_output_____
<code>
import numpy as np
vertices = np.transpose([pm1_rect, pm2_rect])
vertices_____no_output_____
</code>
The following function takes a `DataFrame` as a parameter, plots the proper motion for each star, and adds a shaded `Polygon` to show the region we selected._____no_output_____
<code>
from matplotlib.patches import Polygon
def plot_proper_motion(df):
pm1 = df['pm_phi1']
pm2 = df['pm_phi2']
plt.plot(pm1, pm2, 'ko', markersize=0.3, alpha=0.3)
poly = Polygon(vertices, closed=True,
facecolor='C1', alpha=0.4)
plt.gca().add_patch(poly)
plt.xlabel('$\mu_{\phi_1} [\mathrm{mas~yr}^{-1}]$')
plt.ylabel('$\mu_{\phi_2} [\mathrm{mas~yr}^{-1}]$')
plt.xlim(-12, 8)
plt.ylim(-10, 10)_____no_output_____
</code>
Notice that `add_patch` is like `invert_yaxis`; in order to call it, we have to use `gca` to get the current axes.
Here's what the new version of the figure looks like. We've changed the labels on the axes to be consistent with the paper._____no_output_____
<code>
plt.rcParams['text.usetex'] = False
plt.style.use('default')
plot_proper_motion(centerline)_____no_output_____
</code>
## Upper left
Now let's work on the panel in the upper left. We have to reload `candidates`._____no_output_____
<code>
import os
filename = 'gd1_candidates.hdf5'
path = 'https://github.com/AllenDowney/AstronomicalData/raw/main/data/'
if not os.path.exists(filename):
print(download(path+filename))_____no_output_____import pandas as pd
filename = 'gd1_candidates.hdf5'
candidate_df = pd.read_hdf(filename, 'candidate_df')_____no_output_____
</code>
Here's a function that takes a `DataFrame` of candidate stars and plots their positions in GD-1 coordinates. _____no_output_____
<code>
def plot_first_selection(df):
x = df['phi1']
y = df['phi2']
plt.plot(x, y, 'ko', markersize=0.3, alpha=0.3)
plt.xlabel('$\phi_1$ [deg]')
plt.ylabel('$\phi_2$ [deg]')
plt.title('Proper motion selection', fontsize='medium')
plt.axis('equal')_____no_output_____
</code>
And here's what it looks like._____no_output_____
<code>
plot_first_selection(candidate_df)_____no_output_____
</code>
## Lower right
For the figure in the lower right, we need to reload the merged `DataFrame`, which contains data from Gaia and photometry data from Pan-STARRS._____no_output_____
<code>
import pandas as pd
filename = 'gd1_merged.hdf5'
merged = pd.read_hdf(filename, 'merged')_____no_output_____
</code>
From the previous notebook, here's the function that plots the color-magnitude diagram._____no_output_____
<code>
import matplotlib.pyplot as plt
def plot_cmd(table):
"""Plot a color magnitude diagram.
table: Table or DataFrame with photometry data
"""
y = table['g_mean_psf_mag']
x = table['g_mean_psf_mag'] - table['i_mean_psf_mag']
plt.plot(x, y, 'ko', markersize=0.3, alpha=0.3)
plt.xlim([0, 1.5])
plt.ylim([14, 22])
plt.gca().invert_yaxis()
plt.ylabel('$g_0$')
plt.xlabel('$(g-i)_0$')_____no_output_____
</code>
And here's what it looks like._____no_output_____
<code>
plot_cmd(merged)_____no_output_____
</code>
**Exercise:** Add a few lines to `plot_cmd` to show the Polygon we selected as a shaded area.
Run these cells to get the polygon coordinates we saved in the previous notebook._____no_output_____
<code>
import os
filename = 'gd1_polygon.hdf5'
path = 'https://github.com/AllenDowney/AstronomicalData/raw/main/data/'
if not os.path.exists(filename):
print(download(path+filename))_____no_output_____coords_df = pd.read_hdf(filename, 'coords_df')
coords = coords_df.to_numpy()
coords_____no_output_____# Solution
#poly = Polygon(coords, closed=True,
# facecolor='C1', alpha=0.4)
#plt.gca().add_patch(poly)_____no_output_____
</code>
## Subplots
Now we're ready to put it all together. To make a figure with four subplots, we'll use `subplot2grid`, [which requires two arguments](https://matplotlib.org/3.3.1/api/_as_gen/matplotlib.pyplot.subplot2grid.html):
* `shape`, which is a tuple with the number of rows and columns in the grid, and
* `loc`, which is a tuple identifying the location in the grid we're about to fill.
In this example, `shape` is `(2, 2)` to create two rows and two columns.
For the first panel, `loc` is `(0, 0)`, which indicates row 0 and column 0, which is the upper-left panel.
Here's how we use it to draw the four panels._____no_output_____
<code>
shape = (2, 2)
plt.subplot2grid(shape, (0, 0))
plot_first_selection(candidate_df)
plt.subplot2grid(shape, (0, 1))
plot_proper_motion(centerline)
plt.subplot2grid(shape, (1, 0))
plot_second_selection(selected)
plt.subplot2grid(shape, (1, 1))
plot_cmd(merged)
poly = Polygon(coords, closed=True,
facecolor='C1', alpha=0.4)
plt.gca().add_patch(poly)
plt.tight_layout()_____no_output_____
</code>
We use [`plt.tight_layout`](https://matplotlib.org/3.3.1/tutorials/intermediate/tight_layout_guide.html) at the end, which adjusts the sizes of the panels to make sure the titles and axis labels don't overlap.
**Exercise:** See what happens if you leave out `tight_layout`._____no_output_____## Adjusting proportions
In the previous figure, the panels are all the same size. To get a better view of GD-1, we'd like to stretch the panels on the left and compress the ones on the right.
To do that, we'll use the `colspan` argument to make a panel that spans multiple columns in the grid.
In the following example, `shape` is `(2, 4)`, which means 2 rows and 4 columns.
The panels on the left span three columns, so they are three times wider than the panels on the right.
At the same time, we use `figsize` to adjust the aspect ratio of the whole figure._____no_output_____
<code>
plt.figure(figsize=(9, 4.5))
shape = (2, 4)
plt.subplot2grid(shape, (0, 0), colspan=3)
plot_first_selection(candidate_df)
plt.subplot2grid(shape, (0, 3))
plot_proper_motion(centerline)
plt.subplot2grid(shape, (1, 0), colspan=3)
plot_second_selection(selected)
plt.subplot2grid(shape, (1, 3))
plot_cmd(merged)
poly = Polygon(coords, closed=True,
facecolor='C1', alpha=0.4)
plt.gca().add_patch(poly)
plt.tight_layout()_____no_output_____
</code>
This is looking more and more like the figure in the paper.
**Exercise:** In this example, the ratio of the widths of the panels is 3:1. How would you adjust it if you wanted the ratio to be 3:2? (One possible sketch is shown below.)
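Here is a minimal sketch of one possible answer (it only creates empty axes; the same four plotting functions as above would be called inside each panel). Using a 5-column grid makes the left and right panels 3 and 2 columns wide, giving the 3:2 ratio:
```
import matplotlib.pyplot as plt

plt.figure(figsize=(9, 4.5))
shape = (2, 5)
plt.subplot2grid(shape, (0, 0), colspan=3)   # left panels span 3 columns
plt.subplot2grid(shape, (0, 3), colspan=2)   # right panels span 2 columns
plt.subplot2grid(shape, (1, 0), colspan=3)
plt.subplot2grid(shape, (1, 3), colspan=2)
plt.tight_layout()
```
_____no_output_____## Summary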
In this notebook, we reverse-engineered the figure we've been replicating, identifying elements that seem effective and others that could be improved.
We explored features Matplotlib provides for adding annotations to figures -- including text, lines, arrows, and polygons -- and several ways to customize the appearance of figures. And we learned how to create figures that contain multiple panels._____no_output_____## Best practices
* The most effective figures focus on telling a single story clearly and compellingly.
* Consider using annotations to guide the reader's attention to the most important elements of a figure.
* The default Matplotlib style generates good quality figures, but there are several ways you can override the defaults.
* If you find yourself making the same customizations on several projects, you might want to create your own style sheet._____no_output_____
| {
"repository": "abostroem/AstronomicalData",
"path": "07_plot.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 779143,
"hexsha": "d0ea349405e4a96a3b5ce1fe18b9b38436a4681c",
"max_line_length": 204684,
"avg_line_length": 653.6434563758,
"alphanum_fraction": 0.9485203615
} |
# Notebook from AlekseiMikhalev/lessons
Path: notebooks/03_APIs/02_ML_Scripts/02_PT_ML_Scripts.ipynb
# ML Scripts
So far, we've done everything inside the Jupyter notebooks, but now we're going to move our code into individual Python scripts. We will lay out the code that needs to be inside each script, but check out the `API` lesson to see how it all comes together._____no_output_____<div align="left">
<a href="https://github.com/madewithml/lessons/blob/master/notebooks/03_APIs/02_ML_Scripts/02_PT_ML_Scripts.ipynb" role="button"><img class="notebook-badge-image" src="https://img.shields.io/static/v1?label=&message=View%20On%20GitHub&color=586069&logo=github&labelColor=2f363d"></a>
<a href="https://colab.research.google.com/github/madewithml/lessons/blob/master/notebooks/03_APIs/02_ML_Scripts/02_PT_ML_Scripts.ipynb"><img class="notebook-badge-image" src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a>
</div>_____no_output_____# data.py_____no_output_____## Load data_____no_output_____
<code>
import numpy as np
import pandas as pd
import random
import urllib_____no_output_____SEED = 1234
DATA_FILE = 'news.csv'
INPUT_FEATURE = 'title'
OUTPUT_FEATURE = 'category'_____no_output_____# Set seed for reproducibility
np.random.seed(SEED)
random.seed(SEED)_____no_output_____# Load data from GitHub to notebook's local drive
url = "https://raw.githubusercontent.com/madewithml/lessons/master/data/news.csv"
response = urllib.request.urlopen(url)
html = response.read()
with open(DATA_FILE, 'wb') as fp:
fp.write(html)_____no_output_____# Load data
df = pd.read_csv(DATA_FILE, header=0)
X = df[INPUT_FEATURE].values
y = df[OUTPUT_FEATURE].values
df.head(5)_____no_output_____
</code>
## Preprocessing_____no_output_____
<code>
import re_____no_output_____LOWER = True
FILTERS = r"[!\"'#$%&()*\+,-./:;<=>?@\\\[\]^_`{|}~]"_____no_output_____def preprocess_texts(texts, lower, filters):
preprocessed_texts = []
for text in texts:
if lower:
text = ' '.join(word.lower() for word in text.split(" "))
text = re.sub(r"([.,!?])", r" \1 ", text)
text = re.sub(filters, r"", text)
text = re.sub(' +', ' ', text) # remove multiple spaces
text = text.strip()
preprocessed_texts.append(text)
return preprocessed_texts_____no_output_____original_text = X[0]
X = np.array(preprocess_texts(X, lower=LOWER, filters=FILTERS))
print (f"{original_text} → {X[0]}")Wall St. Bears Claw Back Into the Black (Reuters) → wall st bears claw back into the black reuters
</code>
## Split data_____no_output_____
<code>
import collections
from sklearn.model_selection import train_test_split_____no_output_____TRAIN_SIZE = 0.7
VAL_SIZE = 0.15
TEST_SIZE = 0.15
SHUFFLE = True_____no_output_____def train_val_test_split(X, y, val_size, test_size, shuffle):
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=test_size, stratify=y, shuffle=shuffle)
X_train, X_val, y_train, y_val = train_test_split(
X_train, y_train, test_size=val_size, stratify=y_train, shuffle=shuffle)
return X_train, X_val, X_test, y_train, y_val, y_test_____no_output_____# Create data splits
X_train, X_val, X_test, y_train, y_val, y_test = train_val_test_split(
X=X, y=y, val_size=VAL_SIZE, test_size=TEST_SIZE, shuffle=SHUFFLE)
class_counts = dict(collections.Counter(y))
print (f"X_train: {X_train.shape}, y_train: {y_train.shape}")
print (f"X_val: {X_val.shape}, y_val: {y_val.shape}")
print (f"X_test: {X_test.shape}, y_test: {y_test.shape}")
print (f"{X_train[0]} → {y_train[0]}")
print (f"Classes: {class_counts}")X_train: (86700,), y_train: (86700,)
X_val: (15300,), y_val: (15300,)
X_test: (18000,), y_test: (18000,)
pga overhauls system for ryder cup points → Sports
Classes: {'Business': 30000, 'Sci/Tech': 30000, 'Sports': 30000, 'World': 30000}
</code>
# tokenizers.py_____no_output_____## Tokenizer_____no_output_____
<code>
import json
import re_____no_output_____SEPARATOR = ' ' # word level_____no_output_____class Tokenizer(object):
def __init__(self, separator, pad_token='<PAD>', oov_token='<UNK>',
token_to_index={'<PAD>': 0, '<UNK>': 1}):
self.separator = separator
self.oov_token = oov_token
self.token_to_index = token_to_index
self.index_to_token = {v: k for k, v in self.token_to_index.items()}
def __len__(self):
return len(self.token_to_index)
def __str__(self):
return f"<Tokenizer(num_tokens={len(self)})>"
def fit_on_texts(self, texts):
for text in texts:
for token in text.split(self.separator):
if token not in self.token_to_index:
index = len(self)
self.token_to_index[token] = index
self.index_to_token[index] = token
return self
def texts_to_sequences(self, texts):
sequences = []
for text in texts:
sequence = []
for token in text.split(self.separator):
sequence.append(self.token_to_index.get(
token, self.token_to_index[self.oov_token]))
sequences.append(sequence)
return sequences
def sequences_to_texts(self, sequences):
texts = []
for sequence in sequences:
text = []
for index in sequence:
text.append(self.index_to_token.get(index, self.oov_token))
texts.append(self.separator.join([token for token in text]))
return texts
def save(self, fp):
with open(fp, 'w') as fp:
contents = {
'separator': self.separator,
'oov_token': self.oov_token,
'token_to_index': self.token_to_index
}
json.dump(contents, fp, indent=4, sort_keys=False)
@classmethod
def load(cls, fp):
with open(fp, 'r') as fp:
kwargs = json.load(fp=fp)
return cls(**kwargs)_____no_output_____# Input vectorizer
X_tokenizer = Tokenizer(separator=SEPARATOR)
X_tokenizer.fit_on_texts(texts=X_train)
vocab_size = len(X_tokenizer)
print (X_tokenizer)<Tokenizer(num_tokens=35635)>
# Convert text to sequence of tokens
original_text = X_train[0]
X_train = np.array(X_tokenizer.texts_to_sequences(X_train))
X_val = np.array(X_tokenizer.texts_to_sequences(X_val))
X_test = np.array(X_tokenizer.texts_to_sequences(X_test))
preprocessed_text = X_tokenizer.sequences_to_texts([X_train[0]])
print (f"{original_text} \n\t→ {preprocessed_text} \n\t→ {X_train[0]}")pga overhauls system for ryder cup points
→ ['pga overhauls system for ryder cup points']
→ [2, 3, 4, 5, 6, 7, 8]
# Save tokenizer
X_tokenizer.save(fp='X_tokenizer.json')_____no_output_____# Load tokenizer
X_tokenizer = Tokenizer.load(fp='X_tokenizer.json')
print (X_tokenizer)<Tokenizer(num_tokens=35635)>
</code>
## Label Encoder_____no_output_____
<code>
class LabelEncoder(object):
def __init__(self, class_to_index={}):
self.class_to_index = class_to_index
self.index_to_class = {v: k for k, v in self.class_to_index.items()}
self.classes = list(self.class_to_index.keys())
def __len__(self):
return len(self.class_to_index)
def __str__(self):
return f"<LabelEncoder(num_classes={len(self)})>"
def fit(self, y_train):
for i, class_ in enumerate(np.unique(y_train)):
self.class_to_index[class_] = i
self.index_to_class = {v: k for k, v in self.class_to_index.items()}
self.classes = list(self.class_to_index.keys())
return self
def transform(self, y):
return np.array([self.class_to_index[class_] for class_ in y])
def decode(self, index):
return self.index_to_class.get(index, None)
def save(self, fp):
with open(fp, 'w') as fp:
contents = {
'class_to_index': self.class_to_index
}
json.dump(contents, fp, indent=4, sort_keys=False)
@classmethod
def load(cls, fp):
with open(fp, 'r') as fp:
kwargs = json.load(fp=fp)
return cls(**kwargs)_____no_output_____# Output vectorizer
y_tokenizer = LabelEncoder()_____no_output_____# Fit on train data
y_tokenizer = y_tokenizer.fit(y_train)
print (y_tokenizer)
classes = y_tokenizer.classes
print (f"classes: {classes}")<LabelEncoder(num_classes=4)>
classes: ['Business', 'Sci/Tech', 'Sports', 'World']
# Convert labels to tokens
class_ = y_train[0]
y_train = y_tokenizer.transform(y_train)
y_val = y_tokenizer.transform(y_val)
y_test = y_tokenizer.transform(y_test)
print (f"{class_} → {y_train[0]}")Sports → 2
# Class weights
counts = np.bincount(y_train)
class_weights = {i: 1.0/count for i, count in enumerate(counts)}
print (f"class counts: {counts},\nclass weights: {class_weights}")class counts: [21675 21675 21675 21675],
class weights: {0: 4.61361014994233e-05, 1: 4.61361014994233e-05, 2: 4.61361014994233e-05, 3: 4.61361014994233e-05}
# Save label encoder
y_tokenizer.save(fp='y_tokenizer.json')_____no_output_____# Load label encoder
y_tokenizer = LabelEncoder.load(fp='y_tokenizer.json')
print (y_tokenizer)<LabelEncoder(num_classes=4)>
</code>
# datasets.py_____no_output_____
<code>
import math
import torch
import torch.nn as nn
from torch.utils.data import Dataset
from torch.utils.data import DataLoader_____no_output_____BATCH_SIZE = 128
FILTER_SIZES = [2, 3, 4]_____no_output_____# Set seed for reproducibility
torch.manual_seed(SEED)
torch.cuda.manual_seed(SEED)
torch.cuda.manual_seed_all(SEED) # multi-GPU.
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True_____no_output_____USE_CUDA = True
DEVICE = torch.device('cuda' if (torch.cuda.is_available() and USE_CUDA) else 'cpu')
print (DEVICE)cuda
</code>
## Pad_____no_output_____
<code>
def pad_sequences(X, max_seq_len):
sequences = np.zeros((len(X), max_seq_len))
for i, sequence in enumerate(X):
sequences[i][:len(sequence)] = sequence
return sequences _____no_output_____# Pad sequences
inputs = [[1,2,3], [1,2,3,4], [1,2]]
max_seq_len = max(len(x) for x in inputs)
padded_inputs = pad_sequences(X=inputs, max_seq_len=max_seq_len)
print (padded_inputs.shape)
print (padded_inputs)(3, 4)
[[1. 2. 3. 0.]
[1. 2. 3. 4.]
[1. 2. 0. 0.]]
</code>
## Dataset_____no_output_____
<code>
class TextDataset(Dataset):
def __init__(self, X, y, batch_size, max_filter_size):
self.X = X
self.y = y
self.batch_size = batch_size
self.max_filter_size = max_filter_size
def __len__(self):
return len(self.y)
def __str__(self):
return f"<Dataset(N={len(self)}, batch_size={self.batch_size}, num_batches={self.get_num_batches()})>"
def __getitem__(self, index):
X = self.X[index]
y = self.y[index]
return X, y
def get_num_batches(self):
return math.ceil(len(self)/self.batch_size)
def collate_fn(self, batch):
"""Processing on a batch."""
# Get inputs
X = np.array(batch)[:, 0]
y = np.array(batch)[:, 1]
# Pad inputs
max_seq_len = max(self.max_filter_size, max([len(x) for x in X]))
X = pad_sequences(X=X, max_seq_len=max_seq_len)
return X, y
def generate_batches(self, shuffle=False, drop_last=False):
dataloader = DataLoader(dataset=self, batch_size=self.batch_size,
collate_fn=self.collate_fn, shuffle=shuffle,
drop_last=drop_last, pin_memory=True)
for (X, y) in dataloader:
X = torch.LongTensor(X.astype(np.int32))
y = torch.LongTensor(y.astype(np.int32))
yield X, y_____no_output_____# Create datasets
train_set = TextDataset(X=X_train, y=y_train, batch_size=BATCH_SIZE, max_filter_size=max(FILTER_SIZES))
val_set = TextDataset(X=X_val, y=y_val, batch_size=BATCH_SIZE, max_filter_size=max(FILTER_SIZES))
test_set = TextDataset(X=X_test, y=y_test, batch_size=BATCH_SIZE, max_filter_size=max(FILTER_SIZES))
print (train_set)
print (train_set[0])<Dataset(N=86700, batch_size=128, num_batches=678)>
([2, 3, 4, 5, 6, 7, 8], 2)
# Generate batch
batch_X, batch_y = next(iter(test_set.generate_batches()))
print (batch_X.shape)
print (batch_y.shape)torch.Size([128, 13])
torch.Size([128])
</code>
# utils.py_____no_output_____## Embeddings_____no_output_____
<code>
from io import BytesIO
from urllib.request import urlopen
from zipfile import ZipFile_____no_output_____EMBEDDING_DIM = 100_____no_output_____def load_glove_embeddings(embeddings_file):
"""Load embeddings from a file."""
embeddings = {}
with open(embeddings_file, "r") as fp:
for index, line in enumerate(fp):
values = line.split()
word = values[0]
embedding = np.asarray(values[1:], dtype='float32')
embeddings[word] = embedding
return embeddings_____no_output_____def make_embeddings_matrix(embeddings, token_to_index, embedding_dim):
"""Create embeddings matrix to use in Embedding layer."""
embedding_matrix = np.zeros((len(token_to_index), embedding_dim))
for word, i in token_to_index.items():
embedding_vector = embeddings.get(word)
if embedding_vector is not None:
embedding_matrix[i] = embedding_vector
return embedding_matrix_____no_output_____# Unzip the file (may take ~3-5 minutes)
resp = urlopen('http://nlp.stanford.edu/data/glove.6B.zip')
zipfile = ZipFile(BytesIO(resp.read()))
zipfile.namelist()_____no_output_____# Write embeddings to file
embeddings_file = 'glove.6B.{0}d.txt'.format(EMBEDDING_DIM)
zipfile.extract(embeddings_file)
!lsglove.6B.100d.txt news.csv sample_data X_tokenizer.json y_tokenizer.json
# Create embeddings
embeddings_file = 'glove.6B.{0}d.txt'.format(EMBEDDING_DIM)
glove_embeddings = load_glove_embeddings(embeddings_file=embeddings_file)
embedding_matrix = make_embeddings_matrix(
embeddings=glove_embeddings, token_to_index=X_tokenizer.token_to_index,
embedding_dim=EMBEDDING_DIM)
print (embedding_matrix.shape)(35635, 100)
</code>
# model.py_____no_output_____## Model_____no_output_____
<code>
import torch.nn.functional as F_____no_output_____NUM_FILTERS = 50
HIDDEN_DIM = 128
DROPOUT_P = 0.1_____no_output_____class TextCNN(nn.Module):
def __init__(self, embedding_dim, vocab_size, num_filters, filter_sizes,
hidden_dim, dropout_p, num_classes, pretrained_embeddings=None,
freeze_embeddings=False, padding_idx=0):
super(TextCNN, self).__init__()
# Initialize embeddings
if pretrained_embeddings is None:
self.embeddings = nn.Embedding(
embedding_dim=embedding_dim, num_embeddings=vocab_size,
padding_idx=padding_idx)
else:
pretrained_embeddings = torch.from_numpy(pretrained_embeddings).float()
self.embeddings = nn.Embedding(
embedding_dim=embedding_dim, num_embeddings=vocab_size,
padding_idx=padding_idx, _weight=pretrained_embeddings)
# Freeze embeddings or not
if freeze_embeddings:
self.embeddings.weight.requires_grad = False
# Conv weights
self.filter_sizes = filter_sizes
self.conv = nn.ModuleList(
[nn.Conv1d(in_channels=embedding_dim,
out_channels=num_filters,
kernel_size=f) for f in filter_sizes])
# FC weights
self.dropout = nn.Dropout(dropout_p)
self.fc1 = nn.Linear(num_filters*len(filter_sizes), hidden_dim)
self.fc2 = nn.Linear(hidden_dim, num_classes)
def forward(self, x_in, channel_first=False):
# Embed
x_in = self.embeddings(x_in)
if not channel_first:
x_in = x_in.transpose(1, 2) # (N, channels, sequence length)
# Conv + pool
z = []
conv_outputs = [] # for interpretability
max_seq_len = x_in.shape[2]
for i, f in enumerate(self.filter_sizes):
# `SAME` padding
padding_left = int((self.conv[i].stride[0]*(max_seq_len-1) - max_seq_len + self.filter_sizes[i])/2)
padding_right = int(math.ceil((self.conv[i].stride[0]*(max_seq_len-1) - max_seq_len + self.filter_sizes[i])/2))
# Conv + pool
_z = self.conv[i](F.pad(x_in, (padding_left, padding_right)))
conv_outputs.append(_z)
_z = F.max_pool1d(_z, _z.size(2)).squeeze(2)
z.append(_z)
# Concat conv outputs
z = torch.cat(z, 1)
# FC layers
z = self.fc1(z)
z = self.dropout(z)
logits = self.fc2(z)
return conv_outputs, logits_____no_output_____# Initialize model
model = TextCNN(embedding_dim=EMBEDDING_DIM,
vocab_size=vocab_size,
num_filters=NUM_FILTERS,
filter_sizes=FILTER_SIZES,
hidden_dim=HIDDEN_DIM,
dropout_p=DROPOUT_P,
num_classes=len(classes),
pretrained_embeddings=embedding_matrix,
freeze_embeddings=False).to(DEVICE)
print (model.named_parameters)<bound method Module.named_parameters of TextCNN(
(embeddings): Embedding(35635, 100, padding_idx=0)
(conv): ModuleList(
(0): Conv1d(100, 50, kernel_size=(2,), stride=(1,))
(1): Conv1d(100, 50, kernel_size=(3,), stride=(1,))
(2): Conv1d(100, 50, kernel_size=(4,), stride=(1,))
)
(dropout): Dropout(p=0.1, inplace=False)
(fc1): Linear(in_features=150, out_features=128, bias=True)
(fc2): Linear(in_features=128, out_features=4, bias=True)
)>
</code>
# train.py_____no_output_____## Training_____no_output_____
<code>
from pathlib import Path
from torch.optim import Adam
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.utils.tensorboard import SummaryWriter
%load_ext tensorboardThe tensorboard extension is already loaded. To reload it, use:
%reload_ext tensorboard
LEARNING_RATE = 1e-4
PATIENCE = 3
NUM_EPOCHS = 100_____no_output_____def train_step(model, device, dataset, optimizer):
"""Train step."""
# Set model to train mode
model.train()
train_loss = 0.
correct = 0
# Iterate over train batches
for i, (X, y) in enumerate(dataset.generate_batches()):
# Set device
X, y = X.to(device), y.to(device)
# Reset gradients
optimizer.zero_grad()
# Forward pass
_, logits = model(X)
# Define loss
loss = F.cross_entropy(logits, y)
# Backward pass
loss.backward()
# Update weights
optimizer.step()
# Metrics
y_pred = logits.max(dim=1)[1]
correct += torch.eq(y_pred, y).sum().item()
train_loss += (loss.item() - train_loss) / (i + 1)
train_acc = 100. * correct / len(dataset)
return train_loss, train_acc_____no_output_____def test_step(model, device, dataset):
"""Validation or test step."""
# Set model to eval mode
model.eval()
loss = 0.
correct = 0
y_preds = []
y_targets = []
# Iterate over val batches
with torch.no_grad():
for i, (X, y) in enumerate(dataset.generate_batches()):
# Set device
X, y = X.to(device), y.to(device)
# Forward pass
_, logits = model(X)
# Metrics
loss += F.cross_entropy(logits, y, reduction='sum').item()
y_pred = logits.max(dim=1)[1]
correct += torch.eq(y_pred, y).sum().item()
# Outputs
y_preds.extend(y_pred.cpu().numpy())
y_targets.extend(y.cpu().numpy())
loss /= len(dataset)
accuracy = 100. * correct / len(dataset)
return y_preds, y_targets, loss, accuracy_____no_output_____def train(model, optimizer, scheduler,
train_set, val_set, test_set, writer):
# Epochs
best_val_loss = np.inf
for epoch in range(NUM_EPOCHS):
# Steps
train_loss, train_acc = train_step(model, DEVICE, train_set, optimizer)
_, _, val_loss, val_acc = test_step(model, DEVICE, val_set)
# Metrics
print (f"Epoch: {epoch} | train_loss: {train_loss:.2f}, train_acc: {train_acc:.1f}, val_loss: {val_loss:.2f}, val_acc: {val_acc:.1f}")
writer.add_scalar(tag='training loss', scalar_value=train_loss, global_step=epoch)
writer.add_scalar(tag='training accuracy', scalar_value=train_acc, global_step=epoch)
writer.add_scalar(tag='validation loss', scalar_value=val_loss, global_step=epoch)
writer.add_scalar(tag='validation accuracy', scalar_value=val_acc, global_step=epoch)
# Adjust learning rate
scheduler.step(val_loss)
# Early stopping
if val_loss < best_val_loss:
best_val_loss = val_loss
patience = PATIENCE # reset patience
torch.save(model.state_dict(), MODEL_PATH)
else:
patience -= 1
if not patience: # 0
print ("Stopping early!")
break_____no_output_____# Optimizer
optimizer = Adam(model.parameters(), lr=LEARNING_RATE)
scheduler = ReduceLROnPlateau(optimizer, mode='min', factor=0.1, patience=3)_____no_output_____# Path to save model
MODEL_NAME = 'TextCNN'
MODEL_PATH = Path(f'models/{MODEL_NAME}.h5')
Path(MODEL_PATH.parent).mkdir(parents=True, exist_ok=True)_____no_output_____# TensorBoard writer
log_dir = f'tensorboard/{MODEL_NAME}'
!rm -rf {log_dir} # remove if it already exists
writer = SummaryWriter(log_dir=log_dir)_____no_output_____# Training
train(model, optimizer, scheduler,
train_set, val_set, test_set, writer)Epoch: 0 | train_loss: 0.68, train_acc: 78.2, val_loss: 0.49, val_acc: 82.7
Epoch: 1 | train_loss: 0.44, train_acc: 84.6, val_loss: 0.44, val_acc: 84.6
Epoch: 2 | train_loss: 0.40, train_acc: 86.3, val_loss: 0.42, val_acc: 85.5
Epoch: 3 | train_loss: 0.36, train_acc: 87.4, val_loss: 0.40, val_acc: 86.1
Epoch: 4 | train_loss: 0.34, train_acc: 88.4, val_loss: 0.39, val_acc: 86.4
Epoch: 5 | train_loss: 0.31, train_acc: 89.2, val_loss: 0.39, val_acc: 86.6
Epoch: 6 | train_loss: 0.29, train_acc: 90.0, val_loss: 0.38, val_acc: 86.7
Epoch: 7 | train_loss: 0.27, train_acc: 90.8, val_loss: 0.38, val_acc: 86.8
Epoch: 8 | train_loss: 0.25, train_acc: 91.6, val_loss: 0.38, val_acc: 86.9
Epoch: 9 | train_loss: 0.23, train_acc: 92.3, val_loss: 0.38, val_acc: 86.9
Epoch: 10 | train_loss: 0.21, train_acc: 93.1, val_loss: 0.39, val_acc: 86.8
Stopping early!
%tensorboard --logdir {log_dir}_____no_output_____
</code>
## Evaluation_____no_output_____
<code>
import io
import itertools
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support_____no_output_____def plot_confusion_matrix(y_pred, y_target, classes, cmap=plt.cm.Blues):
"""Plot a confusion matrix using ground truth and predictions."""
# Confusion matrix
cm = confusion_matrix(y_target, y_pred)
cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
# Figure
fig = plt.figure()
ax = fig.add_subplot(111)
cax = ax.matshow(cm, cmap=plt.cm.Blues)
fig.colorbar(cax)
# Axis
plt.title("Confusion matrix")
plt.ylabel("True label")
plt.xlabel("Predicted label")
ax.set_xticklabels([''] + classes)
ax.set_yticklabels([''] + classes)
ax.xaxis.set_label_position('bottom')
ax.xaxis.tick_bottom()
# Values
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, f"{cm[i, j]:d} ({cm_norm[i, j]*100:.1f}%)",
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
# Display
plt.show()_____no_output_____def get_performance(y_pred, y_target, classes):
"""Per-class performance metrics. """
performance = {'overall': {}, 'class': {}}
metrics = precision_recall_fscore_support(y_target, y_pred)
# Overall performance
performance['overall']['precision'] = np.mean(metrics[0])
performance['overall']['recall'] = np.mean(metrics[1])
performance['overall']['f1'] = np.mean(metrics[2])
performance['overall']['num_samples'] = np.float64(np.sum(metrics[3]))
# Per-class performance
for i in range(len(classes)):
performance['class'][classes[i]] = {
"precision": metrics[0][i],
"recall": metrics[1][i],
"f1": metrics[2][i],
"num_samples": np.float64(metrics[3][i])
}
return performance_____no_output_____# Test
y_preds, y_targets, test_loss, test_acc = test_step(model, DEVICE, test_set)
print (f"test_loss: {test_loss:.2f}, test_acc: {test_acc:.1f}")test_loss: 0.56, test_acc: 85.8
# Class performance
performance = get_performance(y_preds, y_targets, classes)
print (json.dumps(performance, indent=4)){
"overall": {
"precision": 0.8588907674416577,
"recall": 0.8583333333333333,
"f1": 0.8584737440288595,
"num_samples": 18000.0
},
"class": {
"Business": {
"precision": 0.8334845735027223,
"recall": 0.8164444444444444,
"f1": 0.8248765154916928,
"num_samples": 4500.0
},
"Sci/Tech": {
"precision": 0.8220540540540541,
"recall": 0.8448888888888889,
"f1": 0.8333150684931507,
"num_samples": 4500.0
},
"Sports": {
"precision": 0.9189374856881154,
"recall": 0.8917777777777778,
"f1": 0.9051539415811436,
"num_samples": 4500.0
},
"World": {
"precision": 0.8610869565217392,
"recall": 0.8802222222222222,
"f1": 0.8705494505494505,
"num_samples": 4500.0
}
}
}
# Confusion matrix
plt.rcParams["figure.figsize"] = (7,7)
plot_confusion_matrix(y_preds, y_targets, classes)
print (classification_report(y_targets, y_preds))_____no_output_____
</code>
# inference.py_____no_output_____## Load model_____no_output_____
<code>
# Load model
model = TextCNN(embedding_dim=EMBEDDING_DIM,
vocab_size=vocab_size,
num_filters=NUM_FILTERS,
filter_sizes=FILTER_SIZES,
hidden_dim=HIDDEN_DIM,
dropout_p=DROPOUT_P,
num_classes=len(classes),
pretrained_embeddings=embedding_matrix,
freeze_embeddings=False).to(DEVICE)
model.load_state_dict(torch.load(MODEL_PATH))
model.eval()_____no_output_____
</code>
## Inference_____no_output_____
<code>
import collections_____no_output_____def get_probability_distribution(y_prob, classes):
results = {}
for i, class_ in enumerate(classes):
results[class_] = np.float64(y_prob[i])
sorted_results = {k: v for k, v in sorted(
results.items(), key=lambda item: item[1], reverse=True)}
return sorted_results_____no_output_____def get_top_n_grams(tokens, conv_outputs, filter_sizes):
# Process conv outputs for each unique filter size
n_grams = {}
for i, filter_size in enumerate(filter_sizes):
# Identify most important n-gram (excluding last token)
popular_indices = collections.Counter([np.argmax(conv_output) \
for conv_output in conv_outputs[filter_size]])
# Get corresponding text
start = popular_indices.most_common(1)[-1][0]
n_gram = " ".join([token for token in tokens[start:start+filter_size]])
n_grams[filter_size] = n_gram
return n_grams_____no_output_____# Inputs
texts = ["The Wimbledon tennis tournament starts next week!",
"The President signed in the new law."]
texts = preprocess_texts(texts, lower=LOWER, filters=FILTERS)
X_infer = np.array(X_tokenizer.texts_to_sequences(texts))
print (f"{texts[0]} \n\t→ {X_tokenizer.sequences_to_texts(X_infer)[0]} \n\t→ {X_infer[0]}")
y_filler = np.array([0]*len(texts))the wimbledon tennis tournament starts next week
→ the wimbledon tennis tournament starts next week
→ [ 39 20635 588 622 785 551 576]
# Dataset
infer_set = TextDataset(X=X_infer, y=y_filler, batch_size=BATCH_SIZE,
max_filter_size=max(FILTER_SIZES))_____no_output_____# Iterate over infer batches
conv_outputs = collections.defaultdict(list)
y_probs = []
with torch.no_grad():
for i, (X, y) in enumerate(infer_set.generate_batches()):
# Set device
X, y = X.to(DEVICE), y.to(DEVICE)
# Forward pass
conv_outputs_, logits = model(X)
y_prob = F.softmax(logits, dim=1)
# Save probabilities
y_probs.extend(y_prob.cpu().numpy())
for i, filter_size in enumerate(FILTER_SIZES):
conv_outputs[filter_size].extend(conv_outputs_[i].cpu().numpy())_____no_output_____# Results
results = []
for index in range(len(X_infer)):
results.append({
'raw_input': texts[index],
'preprocessed_input': X_tokenizer.sequences_to_texts([X_infer[index]])[0],
'probabilities': get_probability_distribution(y_prob[index], y_tokenizer.classes),
'top_n_grams': get_top_n_grams(
tokens=preprocessed_input.split(' '),
conv_outputs={k:v[index] for k,v in conv_outputs.items()},
filter_sizes=FILTER_SIZES)})
print (json.dumps(results, indent=4))[
{
"raw_input": "the wimbledon tennis tournament starts next week",
"preprocessed_input": "the wimbledon tennis tournament starts next week",
"probabilities": {
"Sports": 0.9998615980148315,
"World": 0.0001376205327687785,
"Business": 7.324182433876558e-07,
"Sci/Tech": 7.507998844857866e-08
},
"top_n_grams": {
"2": "tournament starts",
"3": "the wimbledon tennis",
"4": "tennis tournament starts next"
}
},
{
"raw_input": "the president signed in the new law",
"preprocessed_input": "the president signed in the new law",
"probabilities": {
"World": 0.6943650245666504,
"Sports": 0.14958152174949646,
"Business": 0.1257830113172531,
"Sci/Tech": 0.03027038462460041
},
"top_n_grams": {
"2": "law",
"3": "the president signed",
"4": "the president signed in"
}
}
]
</code>
Use inferences to collect information about how the model performs on your real-world data and use it to improve the model over time.
- Use a probability threshold for the top class (e.g. if the top class's probability is less than 75%, send the inference for review).
- Combine the above with per-class probability thresholds (e.g. if the predicted class is `Sports` at 85% but that class's precision/recall is low, send it for review; you might skip the review when the prediction for `Sports` is above 90%).
- If the preprocessed sentence has <UNK> tokens, send the inference for further review.
- When latency is not an issue, use the n-grams to validate the prediction. (A minimal sketch of such review rules is shown below.)
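As a minimal sketch (not part of the original scripts) of how such review rules might look when applied to the `results` list built above, with a purely hypothetical 0.75 threshold:
```
REVIEW_THRESHOLD = 0.75  # hypothetical value; tune per deployment

def needs_review(result, threshold=REVIEW_THRESHOLD):
    # `probabilities` is sorted in descending order, so the first entry is the top class
    top_class, top_prob = next(iter(result['probabilities'].items()))
    if top_prob < threshold:                              # low-confidence prediction
        return True
    if '<UNK>' in result['preprocessed_input'].split():   # unknown tokens present
        return True
    return False

for result in results:
    print(result['raw_input'], '-> review' if needs_review(result) else '-> ok')
```
_____no_output_____Check out the `API` lesson to see how all of this comes together to create an ML service._____no_output_____---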
Share and discover ML projects at <a href="https://madewithml.com/">Made With ML</a>.
<div align="left">
<a class="ai-header-badge" target="_blank" href="https://github.com/madewithml/lessons"><img src="https://img.shields.io/github/stars/madewithml/lessons.svg?style=social&label=Star"></a>
<a class="ai-header-badge" target="_blank" href="https://www.linkedin.com/company/madewithml"><img src="https://img.shields.io/badge/style--5eba00.svg?label=LinkedIn&logo=linkedin&style=social"></a>
<a class="ai-header-badge" target="_blank" href="https://twitter.com/madewithml"><img src="https://img.shields.io/twitter/follow/madewithml.svg?label=Follow&style=social"></a>
</div>
_____no_output_____
| {
"repository": "AlekseiMikhalev/lessons",
"path": "notebooks/03_APIs/02_ML_Scripts/02_PT_ML_Scripts.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 1,
"size": 104836,
"hexsha": "d0ebb72363d326ecdfdf0f622c8972a467ab21de",
"max_line_length": 104836,
"avg_line_length": 104836,
"alphanum_fraction": 0.7651474684
} |
# Notebook from marixko/tutorial_classifiers
Path: The_Basics_of_Supervised_Learning_For_Astronomers.ipynb
<a href="https://colab.research.google.com/github/marixko/Supervised_Learning_Tutorial/blob/master/The_Basics_of_Supervised_Learning_For_Astronomers.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____
###**About Google's Colaboratory: **
This is a free Jupyter environment that runs in Google's cloud, which means you can run codes in your computer without having to install anything. You can create a copy of this tutorial in your own Google's Drive and make your own changes. Colaboratory also allows you to easily share your code with others! [Read more](https://colab.research.google.com/notebooks/basic_features_overview.ipynb)
---_____no_output_____# Introduction
> **Author**: Lilianne M. I. Nakazono (email: [email protected])
> PhD student at Instituto de Astronomia, Geofísica e Ciências Atmosféricas -- Universidade de São Paulo (IAG-USP). Bachelor's degree in Statistics (IME-USP) and in Astronomy (IAG-USP).
> **April 2019**
---
_____no_output_____
###**What is Machine Learning?**
From SAS:
>> *"Machine learning is a method of data analysis that automates analytical model building. It is a branch of artificial intelligence based on the idea that systems can learn from data, identify patterns and make decisions with minimal human intervention."*
###**What is Supervised Learning?**
From S.B. Kotsiantis (2007):
>> *"Every instance in any dataset used by machine learning algorithms is represented using the same set of features. The features may be continuous, categorical or binary. If instances are given with known labels (the corresponding correct outputs) then the learning is called *supervised*, in contrast to *unsupervised learning*, where instances are unlabeled."*
---
###**STAR/GALAXY separation**
In this tutorial we will perform a STAR/GALAXY separation using a real dataset from [S-PLUS](http://www.splus.iag.usp.br/). These data were already matched with [SDSS](https://www.sdss.org/) (DR15) spectroscopic data and will be used to train and test the supervised classifiers. The final step (not included in this tutorial) is to use the trained model to predict the classification of your unknown objects.
This tutorial will be entirely in Python 3 and we will go through the following topics:
- Introduction to `Pandas` ([Documentation](https://pandas.pydata.org/))
- Data visualization with `seaborn` ([Documentation](https://seaborn.pydata.org/))
- Classification methods with `sklearn` ([Documentation](https://scikit-learn.org/stable/index.html))
---
_____no_output_____**Additional information about the data**
ID - Object ID Number
RA - Right Ascension in decimal degrees [J2000]
Dec - Declination in decimal degrees [J2000]
FWHM_n - Full width at half maximum, normalized to the detection image seeing (pixels)
A - Profile RMS along major axis (pixels)
B - Profile RMS along minor axis (pixels)
KrRadDet - Kron apertures in units of A or B (pixels)
uJAVA_auto, F378_auto, F395_auto, F410_auto, F430_auto, g_auto, F515_auto, r_auto, F660_auto, i_auto, F861_auto, z_auto - Total-restricted magnitudes (AB) in corresponding filters
class - Spectroscopic classification from SDSS
_____no_output_____#**1. Libraries and Functions**
_____no_output_____
<code>
import seaborn as sns
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.metrics import confusion_matrix
import itertools
from mlxtend.plotting import plot_decision_regions
import matplotlib as mpl
import matplotlib.gridspec as gridspec
from sklearn import metrics
pd.set_option("display.max_rows", None)
pd.set_option("display.max_columns", None)_____no_output_____# Modified from: https://scikit-learn.org/stable/auto_examples/model_selection/plot_confusion_matrix.html
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.3f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.tight_layout()_____no_output_____
</code>
#**2. Read Data**
For statistical/machine learning purposes it is **always** better to read the data into a dataframe format (data structured with labeled rows and columns).
_____no_output_____
<code>
#Reading dataset from github and saving as dataframe
url = 'https://raw.githubusercontent.com/marixko/'
file = 'tutorial_classifiers/master/tutorial_data.txt'
df = pd.read_csv(url+file, delim_whitespace=True, low_memory=False)_____no_output_____# Run this cell to quickly check your dataset
df_____no_output_____# Check header
list(df)_____no_output_____
</code>
#**3. Pre-analysis**_____no_output_____
Before applying any kind of analysis, you need to be aware of any problems in your dataset that can affect your training (e.g. missing values and outliers). Sometimes this requires pre-processing your dataset beforehand (e.g. for missing values, interpolating or removing them may be necessary); a quick missing-value check is sketched after the cell below. _____no_output_____
<code>
# You can check your dataset by using describe().
# It will return the total count, mean, standard deviation,
# minimum, Q1, Q2 (median), Q3 and maximum
df.describe()
# If you want to check a specific feature use for instance:
# df.FWHM_n.describe()_____no_output_____
</code>
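As mentioned in the pre-analysis above, missing values can bias the training. The cell below is a minimal sketch of a missing-value check (not part of the original analysis); it only assumes the standard `pandas` calls `isnull()` and `dropna()`.
<code>
# Quick sanity check: count missing values per column
print(df.isnull().sum())

# If any are found, the simplest (and lossy) option is to drop those rows:
# df = df.dropna()
# Interpolating or imputing before training is another option; the right
# choice depends on your data and your science goals._____no_output_____
</code>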
Another good practice is to check for high correlations in your dataset, which helps you identify redundant features. This can also allow you to reduce the dimensionality of your dataset.
>> *"The fact that many features depend on one another often unduly influences the accuracy of supervised ML classification models. This problem can be addressed by constructing new features from the basic feature set."* -- S.B. Kotsiantis (2007)
(One way to deal with multicollinearity -- when two or more features are moderately or highly correlated -- is to create a new feature set using [Principal Component Analysis](https://en.wikipedia.org/wiki/Principal_component_analysis); a minimal PCA sketch is shown after the correlation plot below.)_____no_output_____
<code>
plt.close()
f, ax = plt.subplots(figsize=(8, 8))
var = ['FWHM_n', 'A', 'B', 'KrRadDet', 'uJAVA_auto',
'F378_auto', 'F395_auto', 'F410_auto', 'g_auto', 'F515_auto',
'r_auto', 'F660_auto', 'i_auto', 'F861_auto', 'z_auto']
corr = df[var].corr()
sns.heatmap(corr, mask=np.zeros_like(corr, dtype=bool),  # np.bool is deprecated; use the builtin bool
cmap=sns.diverging_palette(220, 10, as_cmap=True),
square=True, ax=ax, center=0, vmin=-1, vmax=1)
plt.title('Correlation Matrix')
plt.show()
#It would also be interesting to check the correlation plot for each class_____no_output_____
</code>
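As noted above, one way to deal with multicollinearity is to build a new, decorrelated feature set with Principal Component Analysis. The cell below is a minimal PCA sketch (not part of the original tutorial); it reuses the `var` list defined for the correlation plot and assumes only the standard `scikit-learn` classes `StandardScaler` and `PCA`. The number of components is illustrative, not tuned.
<code>
# Minimal PCA sketch: project the (standardized) features onto orthogonal
# components and check how much variance each component explains
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA

X_scaled = StandardScaler().fit_transform(df[var])  # PCA is scale sensitive
pca = PCA(n_components=5)                           # illustrative choice
X_pca = pca.fit_transform(X_scaled)
print(pca.explained_variance_ratio_)_____no_output_____
</code>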
Qualitative variables can also be included. In this case, however, there are no qualitative features that came from S-PLUS observations.
But let's check the classification label counts:_____no_output_____
<code>
# For qualitative variables, use value_counts()
df['class'].value_counts()_____no_output_____
</code>
Note that for this example the classes are balanced. This represents a best-case scenario, which rarely happens in the real world.
Be very careful with imbalanced datasets! Some methods and metrics are not well suited to imbalanced cases, so some manipulation of your sampling method (e.g. over-/under-sampling) or of your algorithm (e.g. penalized classification) may be necessary.
_____no_output_____> **Note:** Supervised Learning is not suitable for problems like "I want to find very rare objects that we have never found before!". The learning process is based on your ground-truth samples, so you need to ask yourself "Is my ground-truth sample representative of what I want to find?"_____no_output_____#** 4. Feature Selection**_____no_output_____A very important step of the analysis is choosing your input features. Sometimes you already know which features you need to use to achieve your goals, based on your previous knowledge of the topic. However, you can also evaluate which features will give you the best performance. We will discuss this further in the following sections.
For didactic purposes, let's consider two feature spaces:
> `dim15` = {all useful information from the catalog}
> `dim2` = {normalized FWHM, Profile RMS along major axis}_____no_output_____
<code>
dim15 = ['FWHM_n', 'A', 'B', 'KrRadDet', 'uJAVA_auto',
'F378_auto', 'F395_auto', 'F410_auto', 'g_auto', 'F515_auto',
'r_auto', 'F660_auto', 'i_auto', 'F861_auto', 'z_auto']
dim2 = ['FWHM_n','A']_____no_output_____
</code>
#** 5. Sampling training and testing sets **_____no_output_____Regardless of the classification method you choose, you will want to estimate how accurately your predictive model will perform. This is called **cross-validation** and there are several ways to do it. Some examples are:
* **Holdout method**: randomly split your original dataset into a training set and a testing set. It's very common to adopt a 1:3 ratio for the sizes of the test/training sets, although you can choose another ratio. It is very simple and computationally fast, but be cautious: as a single-run method it may be subject to large variability
* **Leave-p-out cross-validation**:
Uses p observations as the testing set and the remaining observations as the training set. Repeat so that every possible choice of the p observations is covered
* **k-fold cross-validation**: the original dataset is randomly partitioned into k equal-sized subsamples. One subsample is used as the testing set and the other k-1 as the training set. Repeat k times, until each subsample has been used exactly once as the testing set.
I strongly recommend that you also check the other methods before choosing one. For this tutorial we will use the **Holdout method**, for simplicity; a small k-fold sketch is also shown after the split below._____no_output_____
<code>
label = pd.DataFrame(df['class'])
# Transform strings into numbered labels
label.loc[label['class'] == 'STAR', 'class'] = 0
label.loc[label['class'] == 'GALAXY', 'class'] = 1
# Use train_test_split() to sample your training and testing sets
# Let's fix a random_state=42 in order to have the same sets
# on each run. Stratify parameter guarantees that the original
# proportion of the classes is maintained
X_train, X_test, y_train, y_test = train_test_split(df[dim15], label,
test_size=0.3,
random_state=42,
stratify = label)_____no_output_____
</code>
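For comparison with the single holdout split above, the cell below is a minimal k-fold cross-validation sketch (not part of the original tutorial). It assumes only `cross_val_score` from `scikit-learn` and reuses `df`, `dim15` and the original string labels; note that fitting a linear SVC five times on the full dataset can take a while.
<code>
# Minimal 5-fold cross-validation sketch (illustrative only)
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

scores = cross_val_score(SVC(kernel='linear'), df[dim15], df['class'],
                         cv=5, scoring='accuracy')
print(scores, scores.mean())_____no_output_____
</code>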
#** 6. Classification method: Support Vector Machine (SVM)**_____no_output_____We finally reached the point where we are going to run a classification algorithm. It is common to think, at first, that this would be the most complicated part, but a well-done job will require you to spend most of your time on the other steps.
There are several classification methods you can use, each with its own pros and cons, depending on your science goals and on your dataset. I will give you an example using a Support Vector Machine (SVM) with a linear kernel, but I recommend that you also check other methods (e.g. Random Forest, Logistic Regression, K-NN, ...)
**DON'T FORGET TO:**
- Learn the basic idea of the method. You don't need to know all the math behind it, but you need to know how it works intuitively
- Check what are the assumptions of the method and if your dataset is in agreement with it
- Learn what the parameters of your model (a.k.a. hyperparameters) do. Choosing them wisely can be crucial to have good results in the end. Note: the hyperparameters space can also be part of your validation tests_____no_output_____## 6.1. Basic idea_____no_output_____The SVM finds the hyperplane that best separates your data, based on maximizing the margin between each class. For instance, in one dimension SVM will find a point. For two dimensions, it will be a line. For three dimensions, it will be a plane.
To use a linear kernel, we assume that the data is linearly separable. Otherwise, we should use another kernel (e.g. polynomial).
Read more about SVM [here](https://scikit-learn.org/stable/modules/svm.html#scores-probabilities)
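If the linear-separability assumption does not hold, `SVC` accepts other kernels. The snippet below only sketches the call signature; the hyperparameter values are illustrative and are not tuned for this dataset.
<code>
# Illustrative only: the same classifier with non-linear kernels
clf_rbf = SVC(kernel='rbf', C=1.0, gamma='scale')  # radial basis function kernel
clf_poly = SVC(kernel='poly', degree=3)            # polynomial kernel of degree 3
# These would be fit and evaluated exactly like the linear model below,
# e.g. clf_rbf.fit(X_train[dim2], y_train.values.ravel())_____no_output_____
</code>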
_____no_output_____## 6.2. Feature space: dim2_____no_output_____
<code>
# Train your model:
clf2 = SVC(kernel= 'linear')
clf2.fit(X_train[dim2], y_train.values.ravel())
# Make the predictions:
y_pred2 = clf2.predict(X_test[dim2])
# Plot confusion matrix:
matrix = confusion_matrix(y_test['class'], y_pred2)
fig = plot_confusion_matrix(matrix, classes=['STAR','GALAXY'])
plt.show()_____no_output_____
</code>
From the confusion matrix above we can already see how good the results are: most of our stars (galaxies) are assigned as stars (galaxies) and only a few percent were misclassified.
Now let's check the plot and see what the separation looks like:_____no_output_____
<code>
plt.style.use('seaborn-pastel')
fig = plt.figure(figsize=(18,6))
gs = gridspec.GridSpec(1, 2)
ax = plt.subplot(gs[0,0])
sns.scatterplot(x=X_train.FWHM_n, y=X_train.A,
hue=y_train['class'])
#Calculate margin (from https://scikit-learn.org/stable/auto_examples/svm/plot_svm_margin.html)
w = clf2.coef_[0]
a = -w[0] / w[1]
xx = np.linspace(-5, 5)
yy = a * xx - (clf2.intercept_[0]) / w[1]
margin = 1 / np.sqrt(np.sum(clf2.coef_ ** 2))
yy_down = yy - np.sqrt(1 + a ** 2) * margin
yy_up = yy + np.sqrt(1 + a ** 2) * margin
#Plot margin
plt.plot(xx, yy, 'k-')
plt.plot(xx, yy_down, 'k--')
plt.plot(xx, yy_up, 'k--')
plt.xlabel('FWHM_n')
plt.ylabel('A')
plt.xlim(0,8)
plt.ylim(0.8, 10)
plt.title('Training set')
ax = plt.subplot(gs[0,1])
sns.scatterplot(x=X_test.FWHM_n , y=X_test.A, hue=y_test['class'])
plt.plot(xx, yy, 'k-')
plt.plot(xx, yy_down, 'k--')
plt.plot(xx, yy_up, 'k--')
plt.xlim(0,8)
plt.ylim(0.8, 10)
plt.title('Testing set')
plt.show()_____no_output_____
</code>
The solid line corresponds to the optimal threshold found by the SVM. The dashed lines in the plots above correspond to the maximized margin that I mentioned in Section 6.1.
These are calculated using only a small part of the data: the objects near where the separation occurs, which are called the Support Vectors. Let's check which ones were considered for this classification:_____no_output_____
<code>
fig = plt.figure(figsize=(9,7))
sns.scatterplot(x=X_train[dim2].FWHM_n, y=X_train[dim2].A,
hue=y_train['class'])
plt.scatter(clf2.support_vectors_[:, 0],
clf2.support_vectors_[:, 1], s=8,
zorder=10,color='red', marker='+')
plt.xlim(0.9,2)
plt.ylim(0.8,5)
plt.plot(xx, yy, 'k-')
plt.plot(xx, yy_down, 'k--')
plt.plot(xx, yy_up, 'k--')
plt.title('Support vectors (Training set)')_____no_output_____
</code>
## 6.3. Feature space: dim15_____no_output_____In the last section we saw how SVM works in a 2D space. In that case, it is possible to visually check the separation. However, we have much more information available. if we analyse them altogether, it can improve our results. Although, it is impossible to visually check the results, so we need to rely on performance metrics that we will discuss further on the next section.
_____no_output_____
<code>
# Train your model:
clf15 = SVC(kernel= 'linear')
clf15.fit(X_train, y_train.values.ravel())
# Make predictions:
y_pred = clf15.predict(X_test)
# Plot confusion matrix:
matrix = confusion_matrix(y_test['class'], y_pred)
fig = plot_confusion_matrix(matrix, classes=['STAR','GALAXY'])
plt.show()
# Yeah, as simple as that! :) _____no_output_____
</code>
#** 7. Validation and Model Selection**_____no_output_____How can we choose between two (or more) different models?
For that, we have several performance metrics that we can consider when selecting the best model and I will show a few of them.
The way you are going to analyze the metrics depends on your science goals. For instance:
* In a STAR/GALAXY separation you are probably not interested in a specific class, but in the overall classification. You can evaluate your model using, for example, Accuracy or F-measure
* Suppose you had a STAR/QSO problem instead, where your main goal is to find new QSOs. You can evaluate your model using, for example, Precision, Recall or F-measure.
_____no_output_____## 7.1 Accuracy_____no_output_____Defined as the fraction of correct predictions.
(Note: accuracy is biased towards the class with the higher frequency; don't rely on this measurement if you have an imbalanced dataset)_____no_output_____
<code>
print("Accuracy")
print(" First model (dim2):",
np.round(100*metrics.accuracy_score(y_test, y_pred2),2), '%')
print(" Second model (dim15):",
np.round(100*metrics.accuracy_score(y_test, y_pred),2), '%')_____no_output_____
</code>
## 7.2. Precision_____no_output_____Defined as:
> Precision $\equiv \frac{TP}{(TP+FP)}$
TP - True Positive ; FP - False Positive
Note that you need to define which class will be your "positive". For example:
|  | STAR (predicted) | GALAXY (predicted) |
| --- | --- | --- |
| **STAR** (true label) | True Negative | False Positive |
| **GALAXY** (true label) | False Negative | True Positive |
In Astronomy, it's called **purity**._____no_output_____
<code>
P2 = metrics.precision_score(y_test, y_pred2, pos_label=1)
P = metrics.precision_score(y_test, y_pred, pos_label=1)
print("Galaxy Precision")
print(" First model (dim2):", np.round(100*P2,2), '%')
print(" Second model (dim15):", np.round(100*P,2), '%')
# Exercise: Calculate star precision for each model_____no_output_____
</code>
## 7.3. Recall_____no_output_____Defined as:
> Recall $\equiv \frac{TP}{(TP+FN)}$
TP - True Positive ; FN - False Negative
In Astronomy, it's called **completeness**._____no_output_____
<code>
R2 = metrics.recall_score(y_test, y_pred2, pos_label=1)
R = metrics.recall_score(y_test, y_pred, pos_label=1)
print("Galaxy Recall")
print(" First model (dim2):", np.round(100*R2,2), '%')
print(" Second model (dim15):", np.round(100*R,2), '%')
# Exercise: Calculate star recall for each model_____no_output_____
</code>
## 7.4. F-measure_____no_output_____It's the harmonic mean of Precision and Recall:
$F = \Big(\frac{1}{2}\big(P_i^{-1}+R_i^{-1}\big)\Big)^{-1} = 2 \times \frac{P_iR_i}{P_i+R_i}, \quad F \in [0,1]$
_____no_output_____
<code>
print("F-measure")
print(" First model (dim2):", np.round(metrics.f1_score(y_test, y_pred2),3))
print(" Second model (dim15):", np.round(metrics.f1_score(y_test, y_pred),3))_____no_output_____
</code>
## Final message_____no_output_____We came to the end of this tutorial, yay! :)
Although it is called "Machine Learning", you are still the one who is going to make crucial decisions. And that is hard work! I hope I was able to give you at least a brief idea of all the steps involved in the process.
Now, play around with the code:
* Try other algorithms with the same feature selection and compare your results using the performance metrics
* Test changing the parameters of your model
* Try it with your own dataset!
_____no_output_____## Read more:
[Supervised Machine Learning: A Review of Classification Techniques](https://books.google.com/books?hl=en&lr=&id=vLiTXDHr_sYC&oi=fnd&pg=PA3&dq=review+supervised+learning&ots=CYpwxt2Bnn&sig=Y79PK3w3Q8CefKaTh03keRFEwyg#v=onepage&q=review%20supervised%20learning&f=false) (S.B. Kotsiantis, 2007)
An Empirical Comparison of Supervised Learning Algorithms (Rich Caruana and Alexandru Niculescu-Mizil, 2006)
Classification of Imbalanced Data: a Review (Yanmin Sun, Andrew K. C. Wong and Mohamed S. Kamel, 2009)
[Cross-validation](https://en.wikipedia.org/wiki/Cross-validation_(statistics))
[A Practical Guide to Support Vector Classification](https://www.csie.ntu.edu.tw/~cjlin/papers/guide/guide.pdf) (Chih-Wei Hsu, Chih-Chung Chang, and Chih-Jen Lin, 2016)
_____no_output_____
| {
"repository": "marixko/tutorial_classifiers",
"path": "The_Basics_of_Supervised_Learning_For_Astronomers.ipynb",
"matched_keywords": [
"STAR"
],
"stars": 9,
"size": 35489,
"hexsha": "d0eced785aa5fc8f1153c5ada9f43ceba6435e60",
"max_line_length": 426,
"avg_line_length": 35.2073412698,
"alphanum_fraction": 0.5226971738
} |
# Notebook from susnato/probml-notebooks
Path: notebooks/smc_logreg_tempering.ipynb
<a href="https://colab.research.google.com/github/probml/probml-notebooks/blob/main/notebooks/smc_logreg_tempering.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>_____no_output_____#SMC for logistic regression
We compare data tempering (IBIS) with temperature tempering.
Code is from
https://github.com/nchopin/particles/blob/master/book/smc_samplers/logistic_reg.py
_____no_output_____
<code>
!git clone https://github.com/nchopin/particles.git
Cloning into 'particles'...
remote: Enumerating objects: 1506, done.[K
remote: Counting objects: 100% (690/690), done.[K
remote: Compressing objects: 100% (416/416), done.[K
remote: Total 1506 (delta 445), reused 472 (delta 257), pack-reused 816[K
Receiving objects: 100% (1506/1506), 4.48 MiB | 13.93 MiB/s, done.
Resolving deltas: 100% (968/968), done.
%cd /content/particles
/content/particles
!pip install --user .
Processing /content/particles
[33m DEPRECATION: A future pip version will change local packages to be built in-place without first copying to a temporary directory. We recommend you use --use-feature=in-tree-build to test your packages with this new behavior before it becomes the default.
pip 21.3 will remove support for this functionality. You can find discussion regarding this at https://github.com/pypa/pip/issues/7555.[0m
Requirement already satisfied: numpy>=1.18 in /usr/local/lib/python3.7/dist-packages (from particles==0.2) (1.19.5)
Collecting scipy>=1.7
Downloading scipy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (28.5 MB)
[K |████████████████████████████████| 28.5 MB 106 kB/s
[?25hRequirement already satisfied: numba in /usr/local/lib/python3.7/dist-packages (from particles==0.2) (0.51.2)
Requirement already satisfied: joblib in /usr/local/lib/python3.7/dist-packages (from particles==0.2) (1.0.1)
Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba->particles==0.2) (0.34.0)
Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba->particles==0.2) (57.4.0)
Building wheels for collected packages: particles
Building wheel for particles (setup.py) ... [?25l[?25hdone
Created wheel for particles: filename=particles-0.2-py3-none-any.whl size=573163 sha256=8c1ba4a552ad649ea25b8b27167304323c3b05bd28dd4b6844e5c252f8042588
Stored in directory: /tmp/pip-ephem-wheel-cache-klz7twnq/wheels/c4/ec/4d/9651be18bff1d8c3beaff376421029d3d43569a79306f8a862
Successfully built particles
Installing collected packages: scipy, particles
[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
albumentations 0.1.12 requires imgaug<0.2.7,>=0.2.5, but you have imgaug 0.2.9 which is incompatible.[0m
Successfully installed particles-0.2 scipy-1.7.1
import particles
import particles.state_space_models as ssm
import particles.distributions as dists_____no_output_____"""
Numerical experiment of Chapter 17 (SMC samplers).
Compare IBIS and SMC tempering for approximating:
* the normalising constant (marginal likelihood)
* the posterior expectation of the p coefficients
for a logistic regression model.
See below for how to select the data-set.
Note: the SMC samplers implemented in module smc_samplers are now "waste-free"
by default, see Dau & Chopin (2021), and the documentation of `smc_samplers`
(plus the corresponding jupyter notebook). This script still performs exactly
the same numerical experiments as in the book, based on standard (non
waste-free) SMC samplers. To do so, we added ``wastefree=False`` to the
definition of the corresponding `Feynman-Kac` object. Again, see the
documentation of `smc_samplers` for more details.
"""
from matplotlib import pyplot as plt
import numpy as np
from numpy import random
import seaborn as sb
import particles
from particles import datasets as dts
from particles import distributions as dists
from particles import resampling as rs
from particles import smc_samplers as ssps
from particles.collectors import Moments
datasets = {'pima': dts.Pima, 'eeg': dts.Eeg, 'sonar': dts.Sonar}
dataset_name = 'eeg' # choose one of the three
data = datasets[dataset_name]().data
T, p = data.shape
# for each dataset, we adapt:
# * N: number of particles
# * Ks = list of Ks (nr MCMC steps)
# * typK: value of M used for plots on "typical" run
if dataset_name == 'sonar':
N = 10 ** 4
Ks = [10, 20, 30, 40, 50, 60]
typK = 50
elif dataset_name == 'pima':
N = 10 ** 3
Ks = [1, 3, 5]
typK = 3
elif dataset_name == 'eeg':
N = 10 ** 3
#Ks = [1, 3, 5, 7, 10, 15, 20]
Ks = [1, 3, 5]
typK = 5
# prior & model
prior = dists.StructDist({'beta':dists.MvNormal(scale=5.,
cov=np.eye(p))})
class LogisticRegression(ssps.StaticModel):
def logpyt(self, theta, t):
# log-likelihood factor t, for given theta
lin = np.matmul(theta['beta'], data[t, :])
return - np.logaddexp(0., -lin)
_____no_output_____
# algorithms
# N and values of K set above according to dataset
ESSrmin = 0.5
nruns = 2 # 16
results = []
# runs
print('Dataset: %s' % dataset_name)
for K in Ks:
for i in range(nruns):
# need to shuffle the data for IBIS
random.shuffle(data)
model = LogisticRegression(data=data, prior=prior)
for alg_type in ['tempering', 'ibis']:
if alg_type=='ibis':
fk = ssps.IBIS(model=model, wastefree=False, len_chain=K + 1)
pf = particles.SMC(N=N, fk=fk, ESSrmin=ESSrmin,
collect=[Moments], verbose=False)
else:
fk = ssps.AdaptiveTempering(model=model, ESSrmin=ESSrmin,
wastefree=False, len_chain = K + 1)
pf = particles.SMC(N=N, fk=fk, ESSrmin=1., collect=[Moments],
verbose=True)
# must resample at every time step when doing adaptive
# tempering
print('%s, K=%i, run %i' % (alg_type, K, i))
pf.run()
print('CPU time (min): %.2f' % (pf.cpu_time / 60))
print('loglik: %f' % pf.logLt)
res = {'K': K, 'type': alg_type, 'out': pf.summaries,
'cpu': pf.cpu_time}
if alg_type=='ibis':
n_eval = N * (T + K * sum([t for t in range(T) if
pf.summaries.rs_flags[t]]))
else:
n_eval = N * T * (1. + K * (len(pf.summaries.ESSs) - 1))
res['path_sampling'] = pf.X.shared['path_sampling'][-1]
res['exponents'] = pf.X.shared['exponents']
res['n_eval'] = n_eval
results.append(res)
Dataset: eeg
tempering, K=1, run 0
t=0, ESS=500.00, tempering exponent=9.31e-05
t=1, Metropolis acc. rate (over 1 steps): 0.257, ESS=500.00, tempering exponent=0.00029
t=2, Metropolis acc. rate (over 1 steps): 0.275, ESS=500.00, tempering exponent=0.000662
t=3, Metropolis acc. rate (over 1 steps): 0.265, ESS=500.00, tempering exponent=0.00142
t=4, Metropolis acc. rate (over 1 steps): 0.298, ESS=500.00, tempering exponent=0.00251
t=5, Metropolis acc. rate (over 1 steps): 0.304, ESS=500.00, tempering exponent=0.00385
t=6, Metropolis acc. rate (over 1 steps): 0.323, ESS=500.00, tempering exponent=0.00571
t=7, Metropolis acc. rate (over 1 steps): 0.320, ESS=500.00, tempering exponent=0.0086
t=8, Metropolis acc. rate (over 1 steps): 0.353, ESS=500.00, tempering exponent=0.0139
t=9, Metropolis acc. rate (over 1 steps): 0.337, ESS=500.00, tempering exponent=0.0222
t=10, Metropolis acc. rate (over 1 steps): 0.318, ESS=500.00, tempering exponent=0.0332
t=11, Metropolis acc. rate (over 1 steps): 0.283, ESS=500.00, tempering exponent=0.0517
t=12, Metropolis acc. rate (over 1 steps): 0.358, ESS=500.00, tempering exponent=0.0835
t=13, Metropolis acc. rate (over 1 steps): 0.336, ESS=500.00, tempering exponent=0.122
t=14, Metropolis acc. rate (over 1 steps): 0.334, ESS=500.00, tempering exponent=0.174
t=15, Metropolis acc. rate (over 1 steps): 0.307, ESS=500.00, tempering exponent=0.233
t=16, Metropolis acc. rate (over 1 steps): 0.322, ESS=500.00, tempering exponent=0.289
t=17, Metropolis acc. rate (over 1 steps): 0.317, ESS=500.00, tempering exponent=0.35
t=18, Metropolis acc. rate (over 1 steps): 0.393, ESS=500.00, tempering exponent=0.443
t=19, Metropolis acc. rate (over 1 steps): 0.404, ESS=500.00, tempering exponent=0.571
t=20, Metropolis acc. rate (over 1 steps): 0.405, ESS=500.00, tempering exponent=0.699
t=21, Metropolis acc. rate (over 1 steps): 0.446, ESS=500.00, tempering exponent=0.832
t=22, Metropolis acc. rate (over 1 steps): 0.408, ESS=500.00, tempering exponent=0.995
t=23, Metropolis acc. rate (over 1 steps): 0.429, ESS=999.31, tempering exponent=1
CPU time (min): 0.59
loglik: -9894.531888
ibis, K=1, run 0
CPU time (min): 0.45
loglik: -9877.739537
tempering, K=1, run 1
t=0, ESS=500.00, tempering exponent=9.36e-05
t=1, Metropolis acc. rate (over 1 steps): 0.256, ESS=500.00, tempering exponent=0.000288
t=2, Metropolis acc. rate (over 1 steps): 0.263, ESS=500.00, tempering exponent=0.000657
t=3, Metropolis acc. rate (over 1 steps): 0.285, ESS=500.00, tempering exponent=0.00138
t=4, Metropolis acc. rate (over 1 steps): 0.298, ESS=500.00, tempering exponent=0.00246
t=5, Metropolis acc. rate (over 1 steps): 0.346, ESS=500.00, tempering exponent=0.00386
t=6, Metropolis acc. rate (over 1 steps): 0.303, ESS=500.00, tempering exponent=0.00593
t=7, Metropolis acc. rate (over 1 steps): 0.306, ESS=500.00, tempering exponent=0.0086
t=8, Metropolis acc. rate (over 1 steps): 0.316, ESS=500.00, tempering exponent=0.0126
t=9, Metropolis acc. rate (over 1 steps): 0.376, ESS=500.00, tempering exponent=0.0204
t=10, Metropolis acc. rate (over 1 steps): 0.459, ESS=500.00, tempering exponent=0.0327
t=11, Metropolis acc. rate (over 1 steps): 0.435, ESS=500.00, tempering exponent=0.0529
t=12, Metropolis acc. rate (over 1 steps): 0.355, ESS=500.00, tempering exponent=0.0838
t=13, Metropolis acc. rate (over 1 steps): 0.357, ESS=500.00, tempering exponent=0.128
t=14, Metropolis acc. rate (over 1 steps): 0.362, ESS=500.00, tempering exponent=0.189
t=15, Metropolis acc. rate (over 1 steps): 0.361, ESS=500.00, tempering exponent=0.265
t=16, Metropolis acc. rate (over 1 steps): 0.388, ESS=500.00, tempering exponent=0.372
t=17, Metropolis acc. rate (over 1 steps): 0.450, ESS=500.00, tempering exponent=0.519
t=18, Metropolis acc. rate (over 1 steps): 0.484, ESS=500.00, tempering exponent=0.669
t=19, Metropolis acc. rate (over 1 steps): 0.442, ESS=500.00, tempering exponent=0.825
t=20, Metropolis acc. rate (over 1 steps): 0.395, ESS=500.00, tempering exponent=0.991
t=21, Metropolis acc. rate (over 1 steps): 0.366, ESS=997.96, tempering exponent=1
CPU time (min): 0.54
loglik: -9913.230136
ibis, K=1, run 1
CPU time (min): 0.48
loglik: -9909.165287
tempering, K=3, run 0
t=0, ESS=500.00, tempering exponent=9.27e-05
t=1, Metropolis acc. rate (over 3 steps): 0.255, ESS=500.00, tempering exponent=0.000277
t=2, Metropolis acc. rate (over 3 steps): 0.249, ESS=500.00, tempering exponent=0.000627
t=3, Metropolis acc. rate (over 3 steps): 0.263, ESS=500.00, tempering exponent=0.00123
t=4, Metropolis acc. rate (over 3 steps): 0.288, ESS=500.00, tempering exponent=0.00226
t=5, Metropolis acc. rate (over 3 steps): 0.293, ESS=500.00, tempering exponent=0.00381
t=6, Metropolis acc. rate (over 3 steps): 0.275, ESS=500.00, tempering exponent=0.00621
t=7, Metropolis acc. rate (over 3 steps): 0.276, ESS=500.00, tempering exponent=0.00967
t=8, Metropolis acc. rate (over 3 steps): 0.275, ESS=500.00, tempering exponent=0.0152
t=9, Metropolis acc. rate (over 3 steps): 0.310, ESS=500.00, tempering exponent=0.0261
t=10, Metropolis acc. rate (over 3 steps): 0.306, ESS=500.00, tempering exponent=0.0463
t=11, Metropolis acc. rate (over 3 steps): 0.302, ESS=500.00, tempering exponent=0.0762
t=12, Metropolis acc. rate (over 3 steps): 0.278, ESS=500.00, tempering exponent=0.117
t=13, Metropolis acc. rate (over 3 steps): 0.279, ESS=500.00, tempering exponent=0.172
t=14, Metropolis acc. rate (over 3 steps): 0.275, ESS=500.00, tempering exponent=0.233
t=15, Metropolis acc. rate (over 3 steps): 0.287, ESS=500.00, tempering exponent=0.3
t=16, Metropolis acc. rate (over 3 steps): 0.292, ESS=500.00, tempering exponent=0.374
t=17, Metropolis acc. rate (over 3 steps): 0.303, ESS=500.00, tempering exponent=0.459
t=18, Metropolis acc. rate (over 3 steps): 0.300, ESS=500.00, tempering exponent=0.548
t=19, Metropolis acc. rate (over 3 steps): 0.279, ESS=500.00, tempering exponent=0.648
t=20, Metropolis acc. rate (over 3 steps): 0.283, ESS=500.00, tempering exponent=0.763
t=21, Metropolis acc. rate (over 3 steps): 0.279, ESS=500.00, tempering exponent=0.871
t=22, Metropolis acc. rate (over 3 steps): 0.289, ESS=500.00, tempering exponent=0.965
t=23, Metropolis acc. rate (over 3 steps): 0.274, ESS=894.72, tempering exponent=1
CPU time (min): 1.71
loglik: -9872.572721
ibis, K=3, run 0
CPU time (min): 1.22
loglik: -9854.823827
tempering, K=3, run 1
t=0, ESS=500.00, tempering exponent=9.23e-05
t=1, Metropolis acc. rate (over 3 steps): 0.258, ESS=500.00, tempering exponent=0.000275
t=2, Metropolis acc. rate (over 3 steps): 0.261, ESS=500.00, tempering exponent=0.000592
t=3, Metropolis acc. rate (over 3 steps): 0.275, ESS=500.00, tempering exponent=0.00117
t=4, Metropolis acc. rate (over 3 steps): 0.286, ESS=500.00, tempering exponent=0.00222
t=5, Metropolis acc. rate (over 3 steps): 0.301, ESS=500.00, tempering exponent=0.00392
t=6, Metropolis acc. rate (over 3 steps): 0.292, ESS=500.00, tempering exponent=0.00641
t=7, Metropolis acc. rate (over 3 steps): 0.284, ESS=500.00, tempering exponent=0.0106
t=8, Metropolis acc. rate (over 3 steps): 0.285, ESS=500.00, tempering exponent=0.0176
t=9, Metropolis acc. rate (over 3 steps): 0.285, ESS=500.00, tempering exponent=0.0278
t=10, Metropolis acc. rate (over 3 steps): 0.267, ESS=500.00, tempering exponent=0.043
t=11, Metropolis acc. rate (over 3 steps): 0.276, ESS=500.00, tempering exponent=0.0676
t=12, Metropolis acc. rate (over 3 steps): 0.272, ESS=500.00, tempering exponent=0.103
t=13, Metropolis acc. rate (over 3 steps): 0.282, ESS=500.00, tempering exponent=0.148
t=14, Metropolis acc. rate (over 3 steps): 0.286, ESS=500.00, tempering exponent=0.205
t=15, Metropolis acc. rate (over 3 steps): 0.296, ESS=500.00, tempering exponent=0.278
t=16, Metropolis acc. rate (over 3 steps): 0.300, ESS=500.00, tempering exponent=0.37
t=17, Metropolis acc. rate (over 3 steps): 0.305, ESS=500.00, tempering exponent=0.467
t=18, Metropolis acc. rate (over 3 steps): 0.303, ESS=500.00, tempering exponent=0.567
t=19, Metropolis acc. rate (over 3 steps): 0.288, ESS=500.00, tempering exponent=0.665
t=20, Metropolis acc. rate (over 3 steps): 0.291, ESS=500.00, tempering exponent=0.759
t=21, Metropolis acc. rate (over 3 steps): 0.283, ESS=500.00, tempering exponent=0.848
t=22, Metropolis acc. rate (over 3 steps): 0.265, ESS=500.00, tempering exponent=0.94
t=23, Metropolis acc. rate (over 3 steps): 0.260, ESS=764.59, tempering exponent=1
CPU time (min): 1.73
loglik: -9863.238013
ibis, K=3, run 1
CPU time (min): 1.18
loglik: -9852.332666
tempering, K=5, run 0
t=0, ESS=500.00, tempering exponent=8.89e-05
t=1, Metropolis acc. rate (over 5 steps): 0.255, ESS=500.00, tempering exponent=0.000279
t=2, Metropolis acc. rate (over 5 steps): 0.251, ESS=500.00, tempering exponent=0.00061
t=3, Metropolis acc. rate (over 5 steps): 0.252, ESS=500.00, tempering exponent=0.00117
t=4, Metropolis acc. rate (over 5 steps): 0.256, ESS=500.00, tempering exponent=0.00206
t=5, Metropolis acc. rate (over 5 steps): 0.268, ESS=500.00, tempering exponent=0.00342
t=6, Metropolis acc. rate (over 5 steps): 0.270, ESS=500.00, tempering exponent=0.00556
t=7, Metropolis acc. rate (over 5 steps): 0.269, ESS=500.00, tempering exponent=0.00937
t=8, Metropolis acc. rate (over 5 steps): 0.259, ESS=500.00, tempering exponent=0.016
t=9, Metropolis acc. rate (over 5 steps): 0.252, ESS=500.00, tempering exponent=0.027
t=10, Metropolis acc. rate (over 5 steps): 0.232, ESS=500.00, tempering exponent=0.0446
t=11, Metropolis acc. rate (over 5 steps): 0.219, ESS=500.00, tempering exponent=0.0686
t=12, Metropolis acc. rate (over 5 steps): 0.222, ESS=500.00, tempering exponent=0.0983
t=13, Metropolis acc. rate (over 5 steps): 0.222, ESS=500.00, tempering exponent=0.135
t=14, Metropolis acc. rate (over 5 steps): 0.234, ESS=500.00, tempering exponent=0.185
t=15, Metropolis acc. rate (over 5 steps): 0.237, ESS=500.00, tempering exponent=0.25
t=16, Metropolis acc. rate (over 5 steps): 0.247, ESS=500.00, tempering exponent=0.32
t=17, Metropolis acc. rate (over 5 steps): 0.246, ESS=500.00, tempering exponent=0.393
t=18, Metropolis acc. rate (over 5 steps): 0.250, ESS=500.00, tempering exponent=0.477
t=19, Metropolis acc. rate (over 5 steps): 0.260, ESS=500.00, tempering exponent=0.571
t=20, Metropolis acc. rate (over 5 steps): 0.257, ESS=500.00, tempering exponent=0.673
t=21, Metropolis acc. rate (over 5 steps): 0.262, ESS=500.00, tempering exponent=0.783
t=22, Metropolis acc. rate (over 5 steps): 0.254, ESS=500.00, tempering exponent=0.881
t=23, Metropolis acc. rate (over 5 steps): 0.263, ESS=500.00, tempering exponent=0.968
t=24, Metropolis acc. rate (over 5 steps): 0.262, ESS=912.40, tempering exponent=1
CPU time (min): 2.95
loglik: -9854.048048
ibis, K=5, run 0
CPU time (min): 1.85
loglik: -9847.005143
tempering, K=5, run 1
t=0, ESS=500.00, tempering exponent=0.000104
t=1, Metropolis acc. rate (over 5 steps): 0.255, ESS=500.00, tempering exponent=0.000321
t=2, Metropolis acc. rate (over 5 steps): 0.256, ESS=500.00, tempering exponent=0.000686
t=3, Metropolis acc. rate (over 5 steps): 0.245, ESS=500.00, tempering exponent=0.0013
t=4, Metropolis acc. rate (over 5 steps): 0.256, ESS=500.00, tempering exponent=0.00224
t=5, Metropolis acc. rate (over 5 steps): 0.261, ESS=500.00, tempering exponent=0.0037
t=6, Metropolis acc. rate (over 5 steps): 0.260, ESS=500.00, tempering exponent=0.00586
t=7, Metropolis acc. rate (over 5 steps): 0.258, ESS=500.00, tempering exponent=0.00952
t=8, Metropolis acc. rate (over 5 steps): 0.255, ESS=500.00, tempering exponent=0.0155
t=9, Metropolis acc. rate (over 5 steps): 0.251, ESS=500.00, tempering exponent=0.0259
t=10, Metropolis acc. rate (over 5 steps): 0.241, ESS=500.00, tempering exponent=0.0427
t=11, Metropolis acc. rate (over 5 steps): 0.233, ESS=500.00, tempering exponent=0.0676
t=12, Metropolis acc. rate (over 5 steps): 0.228, ESS=500.00, tempering exponent=0.101
t=13, Metropolis acc. rate (over 5 steps): 0.232, ESS=500.00, tempering exponent=0.144
t=14, Metropolis acc. rate (over 5 steps): 0.238, ESS=500.00, tempering exponent=0.196
t=15, Metropolis acc. rate (over 5 steps): 0.246, ESS=500.00, tempering exponent=0.253
t=16, Metropolis acc. rate (over 5 steps): 0.249, ESS=500.00, tempering exponent=0.318
t=17, Metropolis acc. rate (over 5 steps): 0.263, ESS=500.00, tempering exponent=0.388
t=18, Metropolis acc. rate (over 5 steps): 0.254, ESS=500.00, tempering exponent=0.467
t=19, Metropolis acc. rate (over 5 steps): 0.262, ESS=500.00, tempering exponent=0.556
t=20, Metropolis acc. rate (over 5 steps): 0.264, ESS=500.00, tempering exponent=0.653
t=21, Metropolis acc. rate (over 5 steps): 0.252, ESS=500.00, tempering exponent=0.755
t=22, Metropolis acc. rate (over 5 steps): 0.266, ESS=500.00, tempering exponent=0.871
t=23, Metropolis acc. rate (over 5 steps): 0.275, ESS=500.00, tempering exponent=0.989
t=24, Metropolis acc. rate (over 5 steps): 0.276, ESS=993.44, tempering exponent=1
CPU time (min): 2.93
loglik: -9854.453127
ibis, K=5, run 1
CPU time (min): 1.69
loglik: -9849.459786
tempering, K=7, run 0
t=0, ESS=500.00, tempering exponent=9.08e-05
t=1, Metropolis acc. rate (over 7 steps): 0.252, ESS=500.00, tempering exponent=0.000277
t=2, Metropolis acc. rate (over 7 steps): 0.258, ESS=500.00, tempering exponent=0.000608
t=3, Metropolis acc. rate (over 7 steps): 0.251, ESS=500.00, tempering exponent=0.00116
t=4, Metropolis acc. rate (over 7 steps): 0.265, ESS=500.00, tempering exponent=0.00206
t=5, Metropolis acc. rate (over 7 steps): 0.277, ESS=500.00, tempering exponent=0.00354
t=6, Metropolis acc. rate (over 7 steps): 0.266, ESS=500.00, tempering exponent=0.00598
t=7, Metropolis acc. rate (over 7 steps): 0.262, ESS=500.00, tempering exponent=0.00971
t=8, Metropolis acc. rate (over 7 steps): 0.257, ESS=500.00, tempering exponent=0.0161
t=9, Metropolis acc. rate (over 7 steps): 0.248, ESS=500.00, tempering exponent=0.0264
t=10, Metropolis acc. rate (over 7 steps): 0.235, ESS=500.00, tempering exponent=0.0437
t=11, Metropolis acc. rate (over 7 steps): 0.236, ESS=500.00, tempering exponent=0.0684
t=12, Metropolis acc. rate (over 7 steps): 0.225, ESS=500.00, tempering exponent=0.1
t=13, Metropolis acc. rate (over 7 steps): 0.232, ESS=500.00, tempering exponent=0.138
t=14, Metropolis acc. rate (over 7 steps): 0.239, ESS=500.00, tempering exponent=0.183
t=15, Metropolis acc. rate (over 7 steps): 0.231, ESS=500.00, tempering exponent=0.241
t=16, Metropolis acc. rate (over 7 steps): 0.244, ESS=500.00, tempering exponent=0.311
t=17, Metropolis acc. rate (over 7 steps): 0.252, ESS=500.00, tempering exponent=0.386
t=18, Metropolis acc. rate (over 7 steps): 0.252, ESS=500.00, tempering exponent=0.468
t=19, Metropolis acc. rate (over 7 steps): 0.262, ESS=500.00, tempering exponent=0.557
t=20, Metropolis acc. rate (over 7 steps): 0.269, ESS=500.00, tempering exponent=0.649
t=21, Metropolis acc. rate (over 7 steps): 0.256, ESS=500.00, tempering exponent=0.738
t=22, Metropolis acc. rate (over 7 steps): 0.259, ESS=500.00, tempering exponent=0.831
t=23, Metropolis acc. rate (over 7 steps): 0.257, ESS=500.00, tempering exponent=0.934
t=24, Metropolis acc. rate (over 7 steps): 0.255, ESS=748.44, tempering exponent=1
CPU time (min): 4.13
loglik: -9848.850450
ibis, K=7, run 0
CPU time (min): 3.02
loglik: -9847.692150
tempering, K=7, run 1
t=0, ESS=500.00, tempering exponent=9.13e-05
t=1, Metropolis acc. rate (over 7 steps): 0.243, ESS=500.00, tempering exponent=0.000287
t=2, Metropolis acc. rate (over 7 steps): 0.246, ESS=500.00, tempering exponent=0.000652
t=3, Metropolis acc. rate (over 7 steps): 0.258, ESS=500.00, tempering exponent=0.00124
t=4, Metropolis acc. rate (over 7 steps): 0.257, ESS=500.00, tempering exponent=0.00214
t=5, Metropolis acc. rate (over 7 steps): 0.252, ESS=500.00, tempering exponent=0.00362
t=6, Metropolis acc. rate (over 7 steps): 0.251, ESS=500.00, tempering exponent=0.00597
t=7, Metropolis acc. rate (over 7 steps): 0.252, ESS=500.00, tempering exponent=0.00939
t=8, Metropolis acc. rate (over 7 steps): 0.241, ESS=500.00, tempering exponent=0.0148
t=9, Metropolis acc. rate (over 7 steps): 0.238, ESS=500.00, tempering exponent=0.0234
t=10, Metropolis acc. rate (over 7 steps): 0.229, ESS=500.00, tempering exponent=0.0379
t=11, Metropolis acc. rate (over 7 steps): 0.235, ESS=500.00, tempering exponent=0.0614
t=12, Metropolis acc. rate (over 7 steps): 0.237, ESS=500.00, tempering exponent=0.0945
t=13, Metropolis acc. rate (over 7 steps): 0.228, ESS=500.00, tempering exponent=0.137
t=14, Metropolis acc. rate (over 7 steps): 0.240, ESS=500.00, tempering exponent=0.19
t=15, Metropolis acc. rate (over 7 steps): 0.240, ESS=500.00, tempering exponent=0.248
t=16, Metropolis acc. rate (over 7 steps): 0.244, ESS=500.00, tempering exponent=0.311
t=17, Metropolis acc. rate (over 7 steps): 0.239, ESS=500.00, tempering exponent=0.379
t=18, Metropolis acc. rate (over 7 steps): 0.249, ESS=500.00, tempering exponent=0.455
t=19, Metropolis acc. rate (over 7 steps): 0.253, ESS=500.00, tempering exponent=0.538
t=20, Metropolis acc. rate (over 7 steps): 0.261, ESS=500.00, tempering exponent=0.631
t=21, Metropolis acc. rate (over 7 steps): 0.260, ESS=500.00, tempering exponent=0.726
t=22, Metropolis acc. rate (over 7 steps): 0.251, ESS=500.00, tempering exponent=0.817
t=23, Metropolis acc. rate (over 7 steps): 0.251, ESS=500.00, tempering exponent=0.907
t=24, Metropolis acc. rate (over 7 steps): 0.266, ESS=563.15, tempering exponent=1
CPU time (min): 4.11
loglik: -9850.407733
ibis, K=7, run 1
CPU time (min): 2.58
loglik: -9847.138432
# plots
#######
savefigs = True # do you want to save figures as pdfs
plt.style.use('ggplot')
pal = sb.dark_palette('white', n_colors=2)
# Compare standard and path sampling estimates of the log-normalising cst
plt.figure()
diff_est = [(r['out'].logLts[-1] - r['path_sampling'])
for r in results if r['type']=='tempering']
sb.histplot(diff_est)_____no_output_____# Figure 17.1: typical behaviour of IBIS
typ_ibis = [r for r in results if r['type']=='ibis' and r['K'] == typK][0]
typ_ess = typ_ibis['out'].ESSs
typ_rs_times = np.nonzero(typ_ibis['out'].rs_flags)[0]
# Left panel: evolution of ESS
fig, ax = plt.subplots()
ax.plot(typ_ess, 'k')
ax.set(xlabel=r'$t$', ylabel='ESS')
if savefigs:
plt.savefig(dataset_name + '_typical_ibis_ess.pdf')
plt.savefig(dataset_name + '_typical_ibis_ess.png')
# Right panel: evolution of resampling times
fig, ax = plt.subplots()
ax.plot(typ_rs_times[:-1], np.diff(typ_rs_times), 'ko-')
ax.set(xlabel=r'$t$', ylabel='duration between successive rs')
if savefigs:
plt.savefig(dataset_name + '_typical_ibis_rs_times.pdf')
plt.savefig(dataset_name + '_typical_ibis_rs_times.png')_____no_output_____# Figure 17.2: evolution of temperature in a typical tempering run
typ_temp = [r for r in results if r['type']=='tempering' and r['K'] == typK][0]
expnts = typ_temp['exponents']
plt.figure()
plt.plot(expnts, 'k')
plt.xlabel(r'$t$')
plt.ylabel('tempering exponent')
if savefigs:
plt.savefig(dataset_name + '_typical_tempering_temperatures.pdf')
plt.savefig(dataset_name + '_typical_tempering_temperatures.png')_____no_output_____# nr evals vs K for both algorithms
plt.figure()
sb.boxplot(x=[r['K'] for r in results],
y=[r['n_eval'] for r in results],
hue=[r['type'] for r in results])
plt.xlabel('number MCMC steps')
plt.ylabel('number likelihood evaluations')
if savefigs:
plt.savefig(dataset_name + '_boxplots_nevals_vs_K.pdf')
plt.savefig(dataset_name + '_boxplots_nevals_vs_K.png')_____no_output_____print(type(results))
print(results[0])
for r in results:
    print(r['type'], 'K=', r['K'], 'time=', r['cpu'])
<class 'list'>
{'K': 1, 'type': 'tempering', 'out': <particles.collectors.Summaries object at 0x7f307c297a90>, 'cpu': 35.458105918, 'path_sampling': -9894.552441367015, 'exponents': [0.0, 9.306707012648636e-05, 0.0002898627563112856, 0.0006618455648883144, 0.0014183980691296664, 0.002507870437781154, 0.0038479550089156626, 0.005705163274481007, 0.008603119112205248, 0.013935392698910011, 0.022230662894991725, 0.033194739869915085, 0.051692383341063665, 0.08354491448844609, 0.12227938118802113, 0.17406891245698036, 0.23286324675513, 0.289010223871243, 0.35035677692909667, 0.443419593914814, 0.5712134103654529, 0.6994814843236252, 0.8324421117283232, 0.9946749866112512, 1.0], 'n_eval': 359520000.0}
tempering K= 1 time= 35.458105918
ibis K= 1 time= 27.205261016999998
tempering K= 1 time= 32.33377925000002
ibis K= 1 time= 28.813738553000007
tempering K= 3 time= 102.83632723400001
ibis K= 3 time= 73.07628615200008
tempering K= 3 time= 103.81470425999998
ibis K= 3 time= 70.56549695900003
tempering K= 5 time= 177.10674820500003
ibis K= 5 time= 111.04784905800011
tempering K= 5 time= 175.84440240699996
ibis K= 5 time= 101.23418587399988
tempering K= 7 time= 247.79403795000007
ibis K= 7 time= 181.4744050459999
tempering K= 7 time= 246.65038172999994
ibis K= 7 time= 154.86092913099992
# Figure 17.3: Box-plots estimate versus number of MCMC steps
# Left panel: marginal likelihood
plt.figure()
sb.boxplot(x=[r['K'] for r in results],
y=[r['out'].logLts[-1] for r in results],
hue=[r['type'] for r in results])
plt.xlabel('number MCMC steps')
plt.ylabel('marginal likelihood')
if savefigs:
plt.savefig(dataset_name + '_boxplots_marglik_vs_K.pdf')
plt.savefig(dataset_name + '_boxplots_marglik_vs_K.png')
# Right panel: post expectation 1st pred
plt.figure()
sb.boxplot(x=[r['K'] for r in results],
y=[r['out'].moments[-1]['mean']['beta'][1] for r in results],
hue=[r['type'] for r in results])
plt.xlabel('number MCMC steps')
plt.ylabel('posterior expectation first predictor')
if savefigs:
plt.savefig(dataset_name + '_boxplots_postexp1_vs_K.pdf')
plt.savefig(dataset_name + '_boxplots_postexp1_vs_K.png')_____no_output_____# Figure 17.4: variance vs CPU trade-off
# variance times K, as a function of K
plt.figure()
#cols = {'ibis': 'gray', 'tempering':'black'}
cols = {'ibis': 'blue', 'tempering':'red'}
lsts = {'ibis': '--', 'tempering': '-'}
for i in range(p):
for alg_type in ['ibis', 'tempering']:
adj_var = []
for K in Ks:
mts = [r['out'].moments[-1]
for r in results if r['K']==K and r['type']==alg_type]
av = (K * np.var([m['mean']['beta'][i] for m in mts]) /
np.mean([m['var']['beta'][i] for m in mts]))
adj_var.append(av)
if i==0:
plt.plot(Ks, adj_var, color=cols[alg_type], label=alg_type,
alpha=.8, linewidth=2, linestyle=lsts[alg_type])
else:
plt.plot(Ks, adj_var, color=cols[alg_type], alpha=.8, linewidth=2,
linestyle=lsts[alg_type])
plt.legend()
plt.xticks(Ks, ['%i' % K for K in Ks]) # force int ticks
plt.xlabel('number MCMC steps')
plt.ylabel(r'variance times number MCMC steps')
if savefigs:
plt.savefig(dataset_name + '_postexp_var_vs_K.pdf')
plt.savefig(dataset_name + '_postexp_var_vs_K.png')_____no_output_____!ls *.png
eeg_boxplots_marglik_vs_K.png eeg_typical_ibis_ess.png
eeg_boxplots_nevals_vs_K.png eeg_typical_ibis_rs_times.png
eeg_boxplots_postexp1_vs_K.png eeg_typical_tempering_temperatures.png
eeg_postexp_var_vs_K.png
!mkdir figures
!mv *.png figures
!mv *.pdf figures
!ls
book  CONTRIBUTING.md  INSTALL  particles  setup.py
CHANGELOG docs LICENSE README.md
_config.yml figures papers requirements.txt
!zip -r figures figures
  adding: figures/ (stored 0%)
adding: figures/eeg_postexp_var_vs_K.pdf (deflated 30%)
adding: figures/eeg_boxplots_marglik_vs_K.pdf (deflated 29%)
adding: figures/eeg_boxplots_postexp1_vs_K.pdf (deflated 29%)
adding: figures/eeg_postexp_var_vs_K.png (deflated 3%)
adding: figures/eeg_typical_ibis_ess.pdf (deflated 6%)
adding: figures/eeg_typical_ibis_ess.png (deflated 6%)
adding: figures/eeg_typical_tempering_temperatures.pdf (deflated 41%)
adding: figures/eeg_boxplots_postexp1_vs_K.png (deflated 10%)
adding: figures/eeg_typical_ibis_rs_times.png (deflated 7%)
adding: figures/eeg_typical_ibis_rs_times.pdf (deflated 35%)
adding: figures/eeg_boxplots_marglik_vs_K.png (deflated 11%)
adding: figures/eeg_boxplots_nevals_vs_K.pdf (deflated 30%)
adding: figures/eeg_boxplots_nevals_vs_K.png (deflated 8%)
adding: figures/eeg_typical_tempering_temperatures.png (deflated 10%)
_____no_output_____
</code>
| {
"repository": "susnato/probml-notebooks",
"path": "notebooks/smc_logreg_tempering.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 166,
"size": 219894,
"hexsha": "d0ed03954d0588fb22d5533ef9e90b2d0ec0259b",
"max_line_length": 39102,
"avg_line_length": 227.6335403727,
"alphanum_fraction": 0.8731161378
} |
# Notebook from tgadf/charts
Path: Africa.ipynb
<code>
## Basic stuff
%load_ext autoreload
%autoreload
from IPython.core.display import display, HTML
display(HTML("<style>.container { width:100% !important; }</style>"))
display(HTML("""<style>div.output_area{max-height:10000px;overflow:scroll;}</style>"""))
#IPython.Cell.options_default.cm_config.lineNumbers = true;
################################################################################
## Python Version
################################################################################
import sys
from io import StringIO
from pandas import DataFrame, read_csv
import urllib
from time import sleep
from fsUtils import isFile
from ioUtils import getFile, saveFile
from webUtils import getHTML
The autoreload extension is already loaded. To reload it, use:
%reload_ext autoreload
def downloadURL(url):
user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7'
headers={'User-Agent':user_agent,}
print("Now Downloading {0}".format(url))
request=urllib.request.Request(url,None,headers) #The assembled request
response = urllib.request.urlopen(request)
    data = response.read() # The data you need
return data, response.getcode()_____no_output_____txt="""Angola
Ngola Ritmos
Waldemar Bastos
Bonga
Teta Lando
Sam Mangwana
Lourdes Van-Dúnem
Matadidi Mario
Paulo Flores
Neide Van-Dúnem
Neblina
Titica
Don Kikas
Neide Van-Dúnem
Army Squad
Diamondog
KeyLiza
Anselmo Ralph
Neide Van-Dúnem
Don Kikas
Buraka Som Sistema
Titica
Dog Murras
Benin
Angelique Kidjo
Wally Badarou
Nigeria
Evelyn summer
Botswana
Banjo Mosele
Franco and Afro Musica
Matsieng
Zeus
Burkina Faso
Balaké
Cheikh Lô
Dramane Kone
Farafina
Burundi
Khadja Nin
Kebby Boy
Sat-B
Cameroon
Njacko Backo
Francis Bebey
Moni Bilé
Diboué Black
Richard Bona
Les Têtes Brulées
Manu Dibango
Charlotte Dipanda
Dyllann
Stanley Enow
Jovi
Michael Kiessou
Coco Mbassi
Yannick Noah
Kristo Numpuby
Sally Nyolo
Petit Pays
Sam Fan Thomas
Lady Ponce
Magasco
Wes Madiko
Daphné
Salatiel
Mr. Leo
Blanche Bailly
Reniss
Sublymme
King B Swag
Cape Verde
Cesaria Evora
Gil Semedo
Côte d'Ivoire
Alpha Blondy
Magic System
Ernesto Djédjé
Tiken Jah Fakoly
DJ Arafat
Serge Beynaud
Foliba trio
Republic of the Congo (Congo-Brazzaville)
Youlou Mabiala
Pierre Moutouari
Werrason
Papa Wemba
Ferre Gola
Fally Ipupa
Mbilia Bel
Abeti Masikini
Madilu System
Youlou Mabiala
Franco Luambo Makiadi
Franklin Boukaka
Koffi Olomide
Democratic Republic of the Congo (former Zaire)
Abeti Masikini
African Fiesta
Avelino
Awilo Longomba
Bimi Ombale
Bisso Na Bisso
Bouro Mpela
Bozi Boziana
Cindy Le Coeur
Dadju
Damso
Dany Engobo
Evoloko Jocker
Diblo Dibala
Dindo Yogo
Fabregas
Fally Ipupa
Ferré Gola
Gaz Mawete
Geo Bilongo
Gibson Butukondolo
Grand Kalle
Héritier Watanabe
Icha Kavons
INNOSS'B
Jean Bosco Mwenda
Jessy Matador
Jimmy Omonga
Josky Kiambukuta Londa
Kalash Criminel
Kanda Bongo Man
Kasai Allstars
Kaysha
Keblack
Kékélé
King Kester Emeneya
Koffi Olomide
Konono Nº1
Kasaloo Kyanga
LU KALA
Langa Langa Stars
Le Grand Kalle
Lokua Kanza
Madilu Système
Maître Gims
Marie Daulne
Marie Misamu
Mayaula Mayoni
Mbongwana Star
M'bilia Bel
Michel Boyibanda
Mohombi
Mose Fan Fan
M'Pongo Love
Naza
Ndombe Opetum
Nico Kasanda
Ninho
Papa Wemba
Pepe Kalle and Empire Bakuba
Ray Lema
Sam Mangwana
Singuila
Tabu Ley Rochereau
Werrason
Youlou Mabiala
Yxng Bane
Egypt
Amal Maher
Amira Selim
Amr Diab
Angham
Anoushka
Carmen Suleiman
Dina El Wedidi
Hisham Abbas
Leila Mourad
Mayam Mahmoud
Mohamed Mounir
Mohammed Abdel Wahab
Tamer Hosny
Ezz Eddin Hosni (1927-2013)
Mounira El Mahdeya
Nesma Mahgoub
Ratiba El-Hefny
Ruby
Sayed Darwish
Shadiya
Sherine
Umm Kulthum
Yasmine Niazy
Yousra
Zizi Adel
Eritrea
Abraham Afewerki
Ethiopia
Aminé
Mulugeta Abate
Teddy Afro
Alemu Aga
Mahmoud Ahmed
Tadesse Alemu
Mulatu Astatke
Aster Aweke
Abatte Barihun
Aragaw Bedaso
Eyasu Berhe
Girma Bèyènè
Ali Birra
Tamrat Desta
Alemayehu Eshete
Tilahun Gessesse
Gigi
Thomas Gobena
Hachalu Hundessa
Kenna
Getatchew Mekurya
Munit Mesfin
LoLa Monroe
Emilia Rydberg
Kuku Sebsebe
Kiros Alemayehu
Tigist Shibabaw
Shantam Shubissa
Abdu Kiar
Walias Band
Wayna
Asnaketch Worku
Dawit Yifru
Gildo Kassa
Yared Negu
Gabon
Oliver N'Goma
Patience Dabany
Annie-Flore Batchiellilys
Gambia
Sona Maya Jobarteh
Foday Musa Suso
Ghana
Guy Warren
Rebop Kwaku Baah
Becca
DopeNation
Fuse ODG
Jay Ghartey
Osibisa
Wendy Shay
Darkovibes
Mugeez
KiDi
Kuami Eugene
Ebony Reigns
Iwan
Kaakie
Samini
Shatta Wale
Stonebwoy
Bernice Ofei
Danny Nettey
Helen Yawson
Joe Beecham
Joe Mettle
Kofi Owusu Dua Anto
Nayaah
Nii Okai
Ohemaa Mercy
Preachers
QwameGaby
Stella Aba Seal
Tagoe Sisters
Diana Hamilton
Joyce Blessing
Efya
A. B. Crentsil
Alex Konadu
Amakye Dede
Ben Brako
Bisa Kdei
C.K. Mann
Daddy Lumba
E. T. Mensah
Ebo Taylor
K. Frimpong
King Bruce
Kojo Antwi
Koo Nimo
Kwabena Kwabena
Jerry Hansen
Ayesem
Ayigbe Edem
Ball J
Bice Osei Kuffour
Buk Bak
C-Real
Castro
Corp Sayvee
D-Black
Efya
EL
Eno Barony
Gasmilla
Kesse
M.anifest
Medikal
Nero X
Okyeame Kwame
Reggie Rockstone
Ruff n Smooth
Sarkodie
Sherifa Gunu
Sway
Tinny
Trigmatic
Joey B
Pappy Kojo
Gurunkz
R2Bees
Kofi Kinaata
Kwesi Arthur
KiDi
Kuami Eugene
Adam Ro
Bobo Shanti
Rascalimu
Rita Marley
Rocky Dawuni
Samini
Sheriff Ghale
Stonebwoy
Fancy Gadam
Abubakari Lunna
Ephraim Amu
Ken Kafui
Philip Gbeho
Guinea
Sona Tata Condé
Sekouba Bambino
Daddi Cool
Les Ballets Africains
Balla et ses Balladins
Bembeya Jazz
Djeli Moussa Diawara
Famoudou Konaté
Mory Kanté
Mamady Keita
Ballet Nimba
Guinea-Bissau
José Carlos Schwarz
Eneida Marta
Kenya
Akothee
Avril
Ayub Ogada
Cece Sagini
Daddy Owen
David Mathenge
Daudi Kabaka
DJ Fita
Eric Wainaina
E-Sir
Fadhili William
Fundi Konde
George Ramogi
Gloria Muliro
Harry Kimani
Jabali Afrika
Jason Dunford
Jua Cali
Kavirondo
King Kaka
Kleptomaniax
Mighty King Kong
Monski
Musa Juma
Naiboi
Necessary Noize
Okatch Biggy
Otile Brown
Princess Jully
Redsan
Roger Whittaker
Sanaipei Tande
Sauti Sol
Size 8
Stella Mwangi
Suzzana Owiyo
Tony Nyadundo
Wahu
Wanyika bands
Simba Wanyika
Willy Paul
Wyre
Liberia
Sundaygar Dearboy
Knero
Takun-J
Madagascar
AmbondronA
Vaiavy Chila
Mily Clément
Ninie Doniah
Rakoto Frah
D'Gary
Régis Gizavo
Eusèbe Jaojoby
Lego
Mahaleo
Erick Manana
Jerry Marcoss
Toto Mwandjani
Oladad
Rabaza
Naka Rabemanantsoa
Andrianary Ratianarivo
Olombelona Ricky
Rossy
Mama Sana
Senge
Madagascar Slim
Tarika
Tearano
Justin Vali
Nicolas Vatomanga
Mali
Boubacar Traoré
Mory Kanté
Salif Keita
Toumani Diabaté
Kandia Kouyaté
Habib Koité
Issa Bagayogo
Rokia Traoré
Tinariwen
Ali Farka Touré
Amadou et Mariam
Oumou Sangaré
Afel Bocoum
Lobi Traoré
Fatoumata Diawara
Djelimady Tounkara
Rail Band
Mauritania
Dimi Mint Abba
Malouma
Noura Mint Seymali
Morocco
Saad Lamjarred
Elam Jay
AnoGhan
Oussama Belhcen
Rajae El Mouhandiz
Mr Sufian
Manal
Two Tone
Muslim
Dizzy DROS
L7a9d
Cut Killer
Canardo
French Montana
ILY
Larbi Batma
Abdessadeq Cheqara
Mohamed Rouicha
World music
Driss El Maloumi
Henry Azra
Mozambique
Wazimbo
Ghorwane
Fany Pfumo
Stewart Sukuma
Moreira Chonguica
Lizha James
Neyma
Mingas
Al Bowlly
Wazimbo
340ml
Afric Simone
Niger
Mamar Kassey
Mdou Moctar
Nigeria
2face Idibia - hip hop and R&B singer
9ice - hip hop and afropop singer
A
A-Q - hip hop artist
Abiodun Koya (born 1980), gospel singer, opera singer
Adé Bantu - Nigerian-German musician, producer, front man of the 13 piece band BANTU
Adekunle Gold - singer, songwriter
Adewale Ayuba - fuji music singer
Afrikan Boy - rapper
Afro Candy - pop singer
Alamu Atatalo - sekere singer, a type of traditional Yoruba music
Ali Jita - Hausa singer and song writer
Amarachi - singer, dancer, violinist
Andre Blaze - rapper
Aramide - Afro-Jazz singer
Aṣa - R&B, country and pop singer-songwriter
Ayinde Bakare - Yoruba jùjú and highlife musician
Ayinla Kollington - Fuji musician
B
Babatunde Olatunji - drummer
Banky W - pop and R&B singer-songwriter
Blackface Naija - reggae musician
Blaqbonez - rapper
Brymo - singer
Burna Boy - reggae-dancehall musician
C
CDQ - rapper, songwriter
Celestine Ukwu - highlife musician
Chidinma - pop singer
Chike - singer, songwriter and actor
Chinko Ekun – rapper, songwriter
Cobhams Asuquo - soul singer
Cynthia Morgan - pop, hip hop and dancehall singer
D
D'banj - pop singer
Da Emperor - indigenous rapper
Da Grin - rapper
Dammy Krane - singer, songwriter
Darey - R&B singer-songwriter
Dauda Epo-Akara - Yoruba musician
Davido - pop singer
Dekumzy - R&B and highlife singer
Dele Ojo - juju music singer and performer
Dice Ailes - pop singer
Di'Ja - singer
Don Jazzy - recording artist and record producer
D'Prince - Afro-pop singer
Dr. Alban - Nigerian-Swedish recording artist and producer
Dr SID - pop singer
Duncan Mighty - reggae singer
E
Ebenezer Obey - jùjú musician
Echezonachukwu Nduka - pianist and musicologist
Eddy Wata - Eurodance singer
Eedris Abdulkareem
Ego Ogbaro
eLDee – rapper, singer, producer
Emeka Nwokedi – conductor and music director
Emma Nyra – R&B singer
Emmy Gee – rapper
Eva Alordiah-rapper and singer
Evi Edna Ogholi-Reggae singer
F
Falz - rapper, songwriter
Faze - R&B singer
Fela Kuti - afrobeat, jazz singer-songwriter and instrumentalist
Fela Sowande
Femi Kuti - afrobeat, jazz singer-songwriter and instrumentalist
Fireboy DML - singer
Flavour N'abania - highlife and hip hop singer
Frank Edwards – gospel singer
G
Genevieve Nnaji - pop singer
H
Helen Parker-Jayne Isibor - opera singer and composer
Harrysong - singer and songwriter
Haruna Ishola
Humblesmith - afropop singer
I
I.K. Dairo
Ice Prince - rapper
Idahams - Singer and song writer
Iyanya - pop singer
J
J. Martins - highlife singer-songwriter and record producer
Jesse Jagz - rapper
Jasën Blu - R&B singer-songwriter and record producer
Joeboy - singer
Johnny Drille - singer
K
Kcee
King Wadada - reggae singer
Kizz Daniel
Koker
Korede Bello
L
Ladipoe
Lagbaja
Lara George
Laycon
Lil Kesh
Lyta
M
M.I - rapper
M Trill - rapper
Majek Fashek - singer-songwriter
May7ven
Maud Meyer - jazz singer
Mike Ejeagha - Highlife musician
Mo'Cheddah - hip hop singer
Mode 9 - rapper
Monica Ogah - pop singer-songwriter
Mr 2Kay
Mr Eazi - singer-songwriter
Mr Raw
Mr Real - house singer
Muma Gee - pop singer-songwriter
Muna - rapper
N
Naeto C
Naira Marley – singer and songwriter
Niniola - Afro-house artist
Niyola - soul and jazz singer
Nneka - hip hop and soul singer
Nonso Amadi
Nosa - gospel artist
O
Obesere - fuji musician
Obiwon - R&B and gospel singer
Olamide - rapper and hip hop artist
Oliver De Coque
Omawumi - soul singer
Omotola Jalade Ekeinde – R&B and pop singer
Onyeka Onwenu - pop singer
Orezi - reggae singer
Oriental Brothers
Oritse Femi
Orlando Julius
Osita Osadebe
Orlando Owoh
P
Patience Ozokwor - highlife singer
Patoranking - reggae and dancehall singer
Pepenazi - rapper, hip hop artist and record producer
Pericoma Okoye
Peruzzi
Peter King
Phyno - rapper and record producer
Praiz - R&B singer and songwriter
Prince Nico Mbarga
R
Reekado Banks - hip hop artist
Rema - Afrobeats and Trap
Rex Lawson
Ric Hassani
Ruby Gyang
Ruggedman - rapper and hip hop artist
Runtown - songwriter and hip hop artist
S
Sade Adu
Safin De Coque - rapper and hip hop artist
Salawa Abeni - Waka singer
Samsong - gospel singer
Sasha P - rapper and singer
Sean Tizzle - Afropop
Seun Kuti - afrobeat, Jazz singer-songwriter and instrumentalist
Seyi Shay - pop singer and songwriter
Shina Peters - juju singer
Simi
Sinach - gospel singer
Skales - rapper and singer
Shola Allynson - Gospel Singer
Sonny Okosuns
Sound Sultan
Stella Damasus - R&B and soul singer
Sunny Ade - jùjú singer
Tamara Jones
Tekno Miles
Tems
Teni
Terry G
Timaya
Tiwa Savage
Timi Dakolo
Toby Foyeh
Tonto Dikeh
Tony Allen
Tony Tetuila
Tonye Garrick
Tope Alabi
Tunde King
Tunde Nightingale
TY Bello
Victor Olaiya
Victor Uwaifo
Waconzy
Waje
Wasiu Alabi Pasuma
Weird MC
William Onyeabor
Wizkid
Ycee
Yemi Alade
Yinka Ayefele
Yinka Davies
Yung6ix
Yusuf Olatunji
Zlatan
Zayn Africa
Zoro African
Rwanda
Alpha Rwirangira
Tom Close
Riderman
King James
Knolwess Butera
Benjami Mugisha
Urban Boyz
Kate Bashabe
Simon Bikindi
Corneille
Miss Jojo
Senegal
Akon
Baaba Maal
Étoile de Dakar
Ismaël Lô
Mansour Seck
Orchestra Baobab
Positive Black Soul
Thione Seck and Raam Daan
Star Band
Touré Kunda
Youssou N'Dour and Étoile de Dakar
Xalam (band)
Sierra Leone
Bai Kamara
S. E. Rogie
Steady Bongo
K-Man
Emmerson
Anis Halloway
Supa Laj
Somalia
Xiddigaha Geeska
Mohamed Mooge Liibaan
Abdullahi Qarshe
Waayaha Cusub
Ali Feiruz
Hasan Adan Samatar
Aar Maanta
Mohamed Sulayman Tubeec
Maryam Mursal
K'naan
Guduuda 'Arwo
Magool
South Africa
African Children's Choir
Afrotraction
AKA, hip-hop artist and record producer
Akustika Chamber Singers, chamber choir from Pretoria
aKing, South African acoustic rock band
Amanda Black, Multi-award winning and platinum-selling Afro-soul singer-songwriter
Amampondo, award-winning traditional Xhosa percussion group from Cape Town
Anatii (born 1993), hip-hop artist and record producer
A-Reece (born 1997), hip-hop artist and lyricist
Leigh Ashton (born 1956), singer-songwriter from Johannesburg
Assagai, Afro-rock band
The Awakening, gothic rock
B
Babes Wodumo, gqom musician
Ballyhoo, 1980s pop band best known for the hit "Man on the Moon"
The Bang
Leonel Bastos (born 1956), Mozambiquan adult contemporary musician and producer working in South Africa
Battery 9
BlackByrd
Busiswa, house musician
BLK JKS
Elvis Blue, musician and songwriter
Boo!
Bles Bridges (1947–2000), singer
Stef Bos
Cristina Boshoff
Jonathan Butler, singer-songwriter and guitarist
The Brother Moves On
Brasse Vannie Kaap
Bright Blue, 1980s pop band, best known for the hit song "Weeping"
Buckfever Underground
Beatenberg
Bongo Maffin, kwaito music group
Boom Shaka
Bucie (born 1987), R&B and soul singer
Guy Buttery
C
Adrienne Camp, singer-songwriter
Captain Stu, ska, funk, punk, and soul fusion band
Arno Carstens, former lead singer of Springbok Nude Girls
Cassette
Cassper Nyovest, rapper and record producer
Tony Cedras (born 1952), musician
Chad, (born 1993), rapper
Yvonne Chaka Chaka, singer
Chris Chameleon, solo artist, lead singer and bass guitarist for Boo
Blondie Chaplin, singer and guitarist
Jesse Clegg (born 1988)
Johnny Clegg (born 1953)
Clout, 1970s rock group
Basil Coetzee (1944–1998), saxophonist
Mimi Coertse (born 1932), musician
Tony Cox (born 1954), guitarist
Crashcarburn
Crossingpoint, Christian progressive hardcore band
Cutting Jade
Civil Twilight
Crow Black Sky
D
Da L.E.S (born 1985), hip-hop artist
Simphiwe Dana (born 1980)
Danny K (Daniel Koppel), R&B singer-songwriter
Kurt Darren, singer
Pierre de Charmoy
Steven De Groote (1953–1989), classical pianist and winner of the Van Cliburn International Piano Competition
Fanie de Jager (born 1949), operatic tenor
Die Antwoord
Die Heuwels Fantasties
Bonginkosi Dlamini (born 1977), poet, actor and singer, also known as Zola
Dollar Brand (born 1934)
Donald, singer
Dorp
Downfall
Dr Victor and the Rasta Rebels, reggae
Dreamteam, hip-hop group from Durban
Jabulani Dubazana, singer, Ladysmith Black Mambazo
Lucky Dube (1964–2007)
Duck and Cover, hard rock band
Ampie du Preez, singer and guitarist
Johnny Dyani (1945–1986), jazz double bassist
DJ Speedsta , Hip Hop Dj
E
Dennis East, singer
Shane Eagle (b. 1996), hip-hop artist
Alton Edwards, singer
Eden, pop band
Elaine, singer and songwriter
Endorphine
Emtee (b. 1992), hip-hop artist
Dawid Engela (1931–1967), composer and musicologist
éVoid, 1980s new wave
Erica Eloff, soprano
F
The Fake Leather Blues Band
Falling Mirror
Brenda Fassie (1964–2004)
Ricky Fataar (born 1952), drummer
Duncan Faure, singer-songwriter formerly with the band Rabbitt
Mongezi Feza (1945–1975), trumpet player and flautist
Anton Fig, drummer
Josh Fix
Fokofpolisiekar, Afrikaans rock band
Foto na Dans, Afrikaans rock band
Four Jacks and a Jill
Johnny Fourie (1937–2007), jazz guitarist
Freshlyground
Fuzigish
Fifi Cooper
G
Hotep Idris Galeta (born 1941), jazz pianist
Goldfish
Anton Goosen (born 1946), singer
Die Grafsteensangers
Goodluck
H
Half Price (band)
Paul Hanmer, composer, pianist, and jazz musician
The Helicopters
Ken E Henson (born 1947), musician
Henry Ate
Sonja Herholdt
Hog Hoggidy Hog
Steve Hofmeyr (born 1964), singer and actor
Die Heuwels Fantasties
I
Abdullah Ibrahim (born 1934)
iFANi
Isochronous
J
Jabu Khanyile (1957–2006)
Jack Parow
Robbie Jansen (1949–2010)
Jeremy Loops (born 1986), modern folk, singer
Jesse Jordan Band
Theuns Jordaan (born 1971), singer and songwriter
Claire Johnston (born 1967), lead singer of Mango Groove
Trevor Jones (born 1949), composer
Armand Joubert
Joy, a vocal group
John Edmond (born 1936), singer
John Ireland (born 1954), singer and songwriter
Julian Bahula, jazz drummer
Juluka
Just Jinjer (previously Just Jinger)
JR, rapper
Junkyard Lipstick
L-Tido (born 1982), hip-hop artist, aka 16V
K
Kabelo Mabalane (born 1976), kwaito artist, former member of TKZee
Kalahari Surfers
Wouter Kellerman, South African flautist
Johannes Kerkorrel (1960–2002)
Sibongile Khumalo (born 1957), singer
KOBUS!
Koos Kombuis (born 1954)
John Kongos (born 1945)
Kongos
Gé Korsten (1927–1999)
David Kramer (born 1951)
Kwesta, hip-hop artist and poet
K.O, hip-hop artist and record producer
Kabza De Small , King of Amapiano
L
Felix Laband, electronic musician
Riku Lätti, songwriter, composer, music producer
Ladysmith Black Mambazo (born 1960), isicathamiya group
Don Laka, jazz musician, pianist, producer
Robert Lange (born 1948), music producer
Lani Groves
Lark
Jack Lerole (c.1940–2003), tin whistle player; singer
Solomon Linda, songwriter
Lira
Locnville
Roger Lucey, singer and guitarist
Lucky Dube, singer and keyboard player
M
Mark Haze, Rock singer
Sipho Mabuse (born 1951), singer
Arthur Mafokate, kwaito singer and composer
Mahlathini and the Mahotella Queens, a mbaqanga band
Vusi Mahlasela (born 1965)
Makgona Tsohle Band (1964–1999), a mbaqanga instrumental band
Bongi Makeba (1950–1985), singer-songwriter
Miriam Makeba (1932–2008)
Malaika (group)
Petronel Malan (1974–), concert pianist
Man As Machine
Mandoza (born 1978), kwaito singer
Mango Groove
Mildred Mangxola (born 1944), singer in Mahlathini and the Mahotella Queens and member of the Mahotella Queens
Manfred Mann
MarcAlex, group known for the hit "Quick Quick"
Josef Marais (1905–1978)
Martin PK
Hugh Masekela (born 1939)
Dorothy Masuka (born 1935), jazz singer
Neels Mattheus (1935-2003), traditional musician
Dave Matthews (born 1967), lead singer and founding member of Dave Matthews Band
Irene Mawela (born 1940), veteran singer and composer
Illana May
Abednego Mazibuko, singer with Ladysmith Black Mambazo
Albert Mazibuko (born 1948), singer with Ladysmith Black Mambazo
Thandiswa Mazwai (born 1976)
Chris McGregor (1936–1990), jazz pianist and composer
Busi Mhlongo (1947–2010), singer, dancer and composer
Mind Assault
Moreira Chonguica (born 1077), jazz saxophonist and producer
Kippie Moeketsi (1925–1983), saxophonist
Pops Mohamed (born 1949), jazz musician
Louis Moholo (born 1940), drummer
Matthew Mole
Lebo Morake (aka Lebo M)
Shaun Morgan (born 1980), singer also known as Shaun Morgan Welgemoed
Ike Moriz (born 1972), singer, composer and lyricist
Jean Morrison
Mshengu White Mambazo (1976–2003), junior choir of Ladysmith Black Mambazo
Russel Mthembu, singer with Ladysmith Black Mambazo
Moozlie (born 1992), hip-hop artist and television presenter
Muzi (born 1991), electronic musician
Moonchild Sanelly Musician and Dancer
N
Nádine (born 1982), singer-songwriter
The Narrow
Nasty C (born 1997), hip-hop artist and record producer
Bongani Ndodana-Breen, composer
Jim Neversink, alternative country singer-songwriter and guitarist
New Academics
Steve Newman
Bernoldus Niemand (1959–1995)
Simon "Mahlathini" Nkabinde (1937–1999), Mbaqanga singer
West Nkosi (1940–1998), mbaqanga musician
No Friends of Harry
Nobesuthu Mbadu (born 1945), singer in Mahlathini and the Mahotella Queens and member of the Mahotella Queens
Siphiwo Ntshebe (1974–2010), operatic tenor from New Brighton, Port Elizabeth
Ashton Nyte, solo artist as well as lead singer and producer of The Awakening
Thys Nywerheid
Nadia Nakai (born 1990), hip-hop artist
O
Sarah Oates, violinist and associate leader Philharmonia orchestra
Wendy Oldfield, rock singer-songwriter
Oskido, record producer and songwriter
P
Jack Parow, hip-hop artist
The Parlotones
Al Paton, singer-songwriter, producer, and percussionist
Peter Toussaint
Petit Cheval
James Phillips, singer-songwriter also known as Bernoldus Niemand
Anke Pietrangeli (born 1982), winner of the second series of Idols
Dizu Plaatjies, founder and former lead singer of Amampondo
Plush
PJ Powers (born 1960)
Prime Circle
Professor (born 1978), Kwaito musician
Dudu Pukwana (1938–1990), saxophonist, pianist, and composer
Purified, Christian hip-hop artist
Patricia Majalisa, bubblegum artist
Q
Qkumba Zoo
R
Rabbitt
Rouge (rapper)
Trevor Rabin (born 1954), musician
Dolly Rathebe (1928–2004)
Laurika Rauch, Afrikaans singer
Riddare av Koden
Surendran Reddy (1962–2010) pianist and composer
Riky Rick (born 1987), hip-hop artist and record producer
Robin Auld
Ray Phiri (1947-2017), Jazz, jazz fusion, reggae and mbaqanga musician
S
Sandy B
Savuka
Robert Schneider of The Apples in Stereo
Leon Schuster
Seether, formerly called Saron Gas, hard rock and alternative metal band
Gerard Sekoto (1913–1993)
Judith Sephuma
Jockey Shabalala (1943–2006), singer with Ladysmith Black Mambazo
Joseph Shabalala (born 1941), lead singer and founder of Ladysmith Black Mambazo
Msizi Shabalala (born 1975), singer with Ladysmith Black Mambazo
Sibongiseni Shabalala (born 1973), singer with Ladysmith Black Mambazo
Troye Sivan (born 1995), South African-born
Thamsanqa Shabalala (born 1977), singer with Ladysmith Black Mambazo
Thulani Shabalala (born 1968), singer with Ladysmith Black Mambazo
Shane Eagle (born 1996), hip-hop artist and lyricist
Shiraz, band active between 1984 - 1984
Margaret Singana (1938–2000)
Robert Sithole, pennywhistle player
Skylight (band)
Kyla-Rose Smith (born 1982), violinist and dancer
Sonja Herholdt
Enoch Sontonga, teacher, lay-preacher and composer who wrote "Nkosi Sikelel' iAfrika"
South African National Youth Orchestra
Springbok Nude Girls
Zanne Stapelberg (born 1977), opera soprano
Dale Stewart (born 1979)
Sterling EQ
Stimela band formed in 1982
Straatligkinders
Sugardrive
Valiant Swart
Okmalumkoolkat (born 1983), hip-hop artist
Stogie T , Hip Hop Artist
T
Tananas
Taxi Violence
Peta Teanet, singer
TKZee, kwaito group
Hilda Tloubatla (born 1942), lead singer of Mahotella Queens, and singer in Mahlathini and the Mahotella Queens
Tokollo Tshabalala, kwaito singer also known as Magesh
Peter Toussaint, singer-songwriter and guitar player
Toya Delazy, pop singer and pianist
Tribe After Tribe
Tuks, hip-hop artist
Tumi and the Volume
Tweak
U
Uhuru— Kwaito and afropop music group
Urban Creep
V
Bobby van Jaarsveld (born 1987), singer-songwriter and actor
Bok van Blerk (born 1978)
Jason van Wyk (born 1990), composer, producer
Van Coke Kartel
Amor Vittone (born 1972)
Valiant Swart (born 1965)
W
Watershed
Wargrave
Shaun Welgemoed (born 1978)
Heinz Winckler (born 1978), singer who won the first series of Idols
Winston's Jive Mixup
Wonderboom
Markus Wormstorm, electronic musician and composer
Y
Pretty Yende (born 1985), operatic soprano from Piet Retief, Mpumalanga
Yorxe (born 1998), singer and songwriter
YoungstaCPT (born 1991), rapper and songwriter
Z
Zahara, singer-songwriter and poet
Zebra & Giraffe
Karen Zoid (born 1978)
Zola (born 1977)
Zonke (born 1979)
Auth3ntiC
South Sudan
Yaba Angelosi
Mary Boyoi
Emmanuel Jal
Silver X
Sudan
Abdel Aziz El Mubarak
Abdel Gadir Salim
AlKabli
Emmanuel Jal
Mohammed Wardi
Mohamed Gubara
Swaziland
Dusty & Stones
Kambi
Tendaness
Tanzania
Ali Kiba
Bill Nass
Joseph Lusungu
Mnenge Ramadhani
Muhiddin Maalim
Hassani Bitchuka
Saidi Mabera
Wilson Kinyonga
Remmy Ongala
Kasaloo Kyanga
Mr. Nice
Saida Karoli
Diamond Platnumz
Lady Jaydee
Professor Jay
TID
Rose Mhando
Vanessa Mdee
A.Y.
Ruby
Rayvanny
Bi Kidude
Carola Kinasha
Imani Sanga
Tudd Thomas
Harmonize
Joel lwaga
Paul Clement
Goodluck Gozbert
Bella Kombo
Sara Nyongole
Angel Benard
Zoravo
Kibonge Wa Yesu
Calvin John
Mirriam Mbepera
Derick Marton
Beda Andrew
Dr. Ipyana
Ashley Nassary
Jessica Honore
Christina Shusho
Walter Chilambo
Boaz Danken
Martha Mwaipaja
John Lisu
Togo
Bella Bellow
King Mensah
Uganda
Holy Keane Amooti
Aziz Azion
A Pass
Afrigo Band
Babaluku
Bataka Squad
Bebe Cool
Bobi Wine
Bosmic Otim
Fresh Kid Uganda
Jose Chameleone
Mac Elvis
Exodus
David Lutalo
Eddy Kenzo
Fik Fameica
Gabriel K
Goodlyfe Crew
Sam Gombya
Sophie Gombya
Giovanni Kiyingi
Jackie Akello
Jackie Chandiru
Janzi Band
Jemimah Sanyu
Jimmy Katumba
Judith Babirye
Juliana Kanyomozi
Paulo Kafeero
Michael Ross Kakooza
Angella Katatumba
Isaiah Katumwa
Joanita Kawalya
Leila Kayondo
Keko
Suzan Kerunen
Maurice Kirya
Klear Kut
Sylver Kyagulanyi
Philly Lutaaya
Levixone
Lydia Jazmine
Lumix Da Don
Mad Ice
Master Blaster
Rachael Magoola
Fred Masagazi
Moses Matovu
Mariam Ndagire
Lilian Mbabazi
Frank Mbalire
Milege
Peter Miles
Phina Mugerwa
Benon Mugumbya
Fille Mutoni
Grace Nakimera
Halima Namakula
Rema Namakula
Iryn Namubiru
Navio
Nick Nola
Irene Ntale
Gravity Omutujju
Geoffrey Oryema
Papa Cidy
Producer Hannz
Rabadaba
Rachel K
Ragga Dee
Radio and Weasle
Ruyonga
Saba Saba aka Krazy Native
Cinderella Sanyu
Ssewa Ssewa
Sera
Sheebah Karungi
Sister Charity
Spice Diana
Madoxx Ssemanda Sematimba
St. Nelly-Sade
The Mith
Henry Tigan
Allan Toniks
Tshila
Trix Lane
Undercover Brothers Ug
Vampino
Viboyo
Elly Wamala
Wilson Bugembe
Bobi Wine
GNL Zamba
Zambia
Alick Nkhata
B Flow
Ballad Zulu
Chef 187
Jordan Katembula
Just Slim
Larry Maluma
Lazarus Tembo
Leo "K'millian" Moyo
Lily Tembo
Macky 2
Maiko Zulu
Mampi
Moonga K.
Nashil Pichen
OC Osilliation
Paul Ngozi
Shom-C
Victor Kachaka
Yvonne Mwale
Petersen Zagaze
Bobby East
Amayenge
Distro Kuomboka
Mashome Blue Jeans
Witch
Zone Fam
Zimbabwe
Barura Express – band
Bhundu Boys – jit and chimurenga music band
Hohodza – band
Mbira dzeNharira – mbira band
Mechanic Manyeruke and the Puritans – gospel music group
R.U.N.N. family – mbira-inspired reggae and rhumba group
Siyaya – music and dance group
Flint Bedrock (born 1985) – pop singer-songwriter
Mkhululi Bhebhe (born 1984) - contemporary gospel vocalist
Charles Charamba (born 1971) – gospel singer[1]
Olivia Charamba (1999–1999) – gospel singer
Brian Chikwava (born 1971) – writer and musician
Simon Chimbetu (1955–2005) – singer-songwriter and guitarist[2]
James Chimombe (1951–1990) – singer and guitarist[2]
Musekiwa Chingodza (born 1970) – mbira and marimba player
Chirikure Chirikure (born 1962) – musician and songwriter
Stella Chiweshe (born 1946) – mbira player and singer-songwriter
Dizzy Dee (1999–1999) – Australia-based reggae artist
Leonard Dembo (1959–1996) – guitarist and singer-songwriter; member of the band Barura Express[2]
Tehn Diamond (born 1985) – Zimbabwean hip hop musician and rapper
Chartwell Dutiro (born 1957) – mbira player and singer-songwriter[3]
Mbuya Dyoko (1944–2013) – mbira player
John Edmond (born 1936) – Rhodesian folk singer
Tendayi Gahamadze (born 1959) – mbira player and singer-songwriter; member of Mbira dzeNharira
Michael Gibbs (born 1937) – England-based jazz composer
Derek Hudson (1934–2005) – English-born conductor and composer
Ngonidzashe Kambarami (born 1983) – urban grooves artist
Victor Kunonga (born 1974) – Afrojazz singer-songwriter
Forward Kwenda (born 1963) – mbira player
Jah Prayzah (born 1987) – Afropop and Afrojazz musician
Hope Masike – mbira player and percussionist and singer
Ignatius Mabasa (born 1971) – writer and musician
Alick Macheso (born 1968) – singer-songwriter and guitarist
Safirio Madzikatire (1932–1996) – actor and musician[2]
Madzitatiguru (born 1989) – poet and musician
Takunda Mafika (1983–2011) – mbira player
Cosmas Magaya (born 1953) – mbira player
Tkay Maidza (born 1996) – Australia-based singer-songwriter and rapper
Lovemore Majaivana (born 1954) – Ndebele music singer-songwriter
Zeke Manyika (born 1955) – England-based rock and roll singer-songwriter and drummer
Leonard Mapfumo (born 1983) – urban grooves and hip hop artist
Thomas Mapfumo (born 1945) – chimurenga music artist
Chiwoniso Maraire (1976–2013) – mbira player and singer-songwriter[2]
Dumisani Maraire (1944–1999) – mbira payer and singer-songwriter
Mashasha (born 1982) – guitarist and singer-songwriter
Maskiri (born 1980) – hip hop artist and rapper
Dorothy Masuka (born 1935) – South Africa-based jazz singer
Paul Matavire (1961–2005) – blind jit musician[2]
Louis Mhlanga (born 1956) – South Africa-based Afrojazz singer-songwriter and guitarist
Obi Mhondera (born 1980) – England-based pop songwriter
Eric Moyo (born 1982) – singer
Tongai Moyo (1968–2011) – sungura singer-songwriter[2]
August Msarurgwa (1920–1968) – composer
Audius Mtawarira (born 1977) – Australia-based urban grooves artist
Oliver Mtukudzi (1952–2019) – Afrojazz singer-songwriter and guitarist
Sam Mtukudzi (1988–2010) – Afrojazz musician[2]
Anna Mudeka – England-based musician
Carol Mujokoro – gospel music artist
Ephat Mujuru (1950–2001) – mbira player[2]
Prince Kudakwashe Musarurwa (born 1988) – Afrojazz musician
Isaac Musekiwa – DR Congo-based soukous artist and saxophonist
Busi Ncube (born 1963) – mbira player and singer
Albert Nyathi (born 1962) – poet and singer-songwriter
Jah Prayzah, musician
Ramadu (born 1975) – singer-songwriter
Roki (born 1985) – Madagascar-born urban grooves artist
Kingsley Sambo (1936–1977) – jazz guitarist
Herbert Schwamborn (born 1973) – Germany-based hip hop and electronic music artist; member of the band Söhne Mannheims
Jonah Sithole (1952–1997) – chimurenga music artist and guitarist[2]
Solomon Skuza (1956–1995) – pop singer-songwriter[2]
Buffalo Souljah (born 1980) – Zimdancehall and reggae artist
Shingisai Suluma (born 1971) – gospel music artist
Takura (born 1991) – house music and hip hop artist
Tocky Vibes (born 1993) - Singer Lyricist Songwriter
System Tazvida (born 1968) – singer-songwriter
Biggie Tembo Jr. (born 1988) – jit musician
Clem Tholet (1948–2004) – Rhodesian guitarist and folk singer
Garikayi Tirikoti (born 1961) – mbira player
Diego Tryno (born 1998) - urban contemporary and hip-hop musician
Viomak – protest musician and activist
Patrick Mukwamba (born 1951) – pop singer
Tarisai Vushe (born 1987) – Australia-based singer who appeared on Australian Idol
Edith WeUtonga (born 1979) – Afrojazz singer-songwriter and bass guitarist
Winky D (born 1983) – dancehall and reggae artist
Jonathan Wutawunashe – gospel artist
Leonard Zhakata (born 1968) – sungura and adult contemporary music artist
Zinjaziyamluma- maskandi singer
Charity Zisengwe – contemporary Christian music artist
Soukous
Antoine Kolosoy, a.k.a. Papa Wendo
Aurlus Mabele
Awilo Longomba
Bozi Boziana
Diblo Dibala
Dindo Yogo
Dr Nico Kasanda
Empire Bakuba
Evoloko Jocker
Fally Ipupa
Ferre Gola
François Luambo Makiadi, band leader of OK Jazz
Grand Kalle, band leader of Grand Kalle et l'African Jazz
Kanda Bongo Man
Kasaloo Kyanga
King Kester Emeneya
Koffi Olomide
Les Quatre Étoiles 4 Etoiles
Loketo
M'bilia Bel
Meiway
Mose Fan Fan
Monique Séka
Nyboma
Oliver N'Goma
Papa Wemba
Pepe Kalle
Quartier Latin International
Les Quatre Étoiles
Remmy Ongala
Rigo Star
Sam Fan Thomas
Sam Mangwana
Samba Mapangala, band leader of Orchestra Virunga
Tabu Ley Rochereau, band leader of African Fiesta
Tshala Muana
Werrason
Yondo Sister
Zaiko Langa Langa""".split("\n")
from string import ascii_uppercase
data = {}
country = None
for line in txt:
if country is None:
country = line
data[country] = []
continue
if len(line) == 0:
country = None
continue
artist = line
artist = artist.split(" (born")[0]
artist = artist.split(" (1")[0]
artist = artist.split(" (b")[0]
artist = artist.split(" (c")[0]
artist = artist.split(" (p")[0]
artist = artist.split(", ")[0]
artist = artist.split(" – ")[0]
artist = artist.split(" - ")[0]
artist = artist.replace("(band)", "").strip()
artist = artist.replace("(group)", "").strip()
artist = artist.replace("(rapper)", "").strip()
if artist in ascii_uppercase:
continue
data[country].append(artist)
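# Heuristics used by the loop above:
# - a blank line closes the current country's list; the next non-empty line names a new country
# - trailing annotations such as " (born 1980)", " (1944-1999)", ", rapper" or " - gospel singer"
#   are stripped by splitting on the first matching delimiter
# - literal "(band)", "(group)" and "(rapper)" tags are removed
# - single uppercase letters ("A", "B", ...) are alphabetical section headers and are skipped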
from pandas import DataFrame, Series
african_artists = DataFrame(Series(data))
african_artists.columns = ["Artists"]
african_artists.head()
saveFile(ifile="/Volumes/Piggy/Charts/data/africa/categories/Artists.p", idata=african_artists, debug=True)Saving data to /Volumes/Piggy/Charts/data/africa/categories/Artists.p
--> This file is 16.7kB.
Saved data to /Volumes/Piggy/Charts/data/africa/categories/Artists.p
--> This file is 16.7kB.
%autoreload
from Africa import africaData
africa = africaData()
africa.parse()Found 1 files.
Angola [Ngola Ritmos, Waldemar Bastos, Bonga, Teta La...
Benin [Angelique Kidjo, Wally Badarou]
Nigeria [2face Idibia, 9ice, A-Q, Abiodun Koya, Adé Ba...
Botswana [Banjo Mosele, Franco and Afro Musica, Matsien...
Cameroon [Njacko Backo, Francis Bebey, Moni Bilé, Dibou...
Cape Verde [Cesaria Evora, Gil Semedo, Côte d'Ivoire, Alp...
Republic of the Congo (Congo-Brazzaville) [Youlou Mabiala, Pierre Moutouari, Werrason, P...
Democratic Republic of the Congo (former Zaire) [Abeti Masikini, African Fiesta, Avelino, Awil...
Egypt [Amal Maher, Amira Selim, Amr Diab, Angham, An...
Eritrea [Abraham Afewerki]
Ethiopia [Aminé, Mulugeta Abate, Teddy Afro, Alemu Aga,...
Gambia [Sona Maya Jobarteh, Foday Musa Suso]
Ghana [Guy Warren, Rebop Kwaku Baah, Becca, DopeNati...
Kenya [Akothee, Avril, Ayub Ogada, Cece Sagini, Dadd...
Madagascar [AmbondronA, Vaiavy Chila, Mily Clément, Ninie...
Mali [Boubacar Traoré, Mory Kanté, Salif Keita, Tou...
Morocco [Saad Lamjarred, Elam Jay, AnoGhan, Oussama Be...
Rwanda [Alpha Rwirangira, Tom Close, Riderman, King J...
Senegal [Akon, Baaba Maal, Étoile de Dakar, Ismaël Lô,...
Sierra Leone [Bai Kamara, S. E. Rogie, Steady Bongo, K-Man,...
Somalia [Xiddigaha Geeska, Mohamed Mooge Liibaan, Abdu...
South Africa [African Children's Choir, Afrotraction, AKA, ...
South Sudan [Yaba Angelosi, Mary Boyoi, Emmanuel Jal, Silv...
Sudan [Abdel Aziz El Mubarak, Abdel Gadir Salim, AlK...
Swaziland [Dusty & Stones, Kambi, Tendaness]
Tanzania [Ali Kiba, Bill Nass, Joseph Lusungu, Mnenge R...
Togo [Bella Bellow, King Mensah]
Uganda [Holy Keane Amooti, Aziz Azion, A Pass, Afrigo...
Zambia [Alick Nkhata, B Flow, Ballad Zulu, Chef 187, ...
Zimbabwe [Barura Express, Bhundu Boys, Hohodza, Mbira d...
Soukous [Antoine Kolosoy, Aurlus Mabele, Awilo Longomb...
Name: Artists, dtype: object
Saving data to /Volumes/Piggy/Charts/data/africa/results/Angola.p
--> This file is 277B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Angola.p
--> This file is 277B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Benin.p
--> This file is 57B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Benin.p
--> This file is 57B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Nigeria.p
--> This file is 2.0kB.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Nigeria.p
--> This file is 2.0kB.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Botswana.p
--> This file is 189B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Botswana.p
--> This file is 189B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Cameroon.p
--> This file is 369B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Cameroon.p
--> This file is 369B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Cape_Verde.p
--> This file is 182B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Cape_Verde.p
--> This file is 182B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Republic_of_the_Congo_(Congo-Brazzaville).p
--> This file is 204B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Republic_of_the_Congo_(Congo-Brazzaville).p
--> This file is 204B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Democratic_Republic_of_the_Congo_(former_Zaire).p
--> This file is 832B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Democratic_Republic_of_the_Congo_(former_Zaire).p
--> This file is 832B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Egypt.p
--> This file is 326B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Egypt.p
--> This file is 326B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Eritrea.p
--> This file is 38B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Eritrea.p
--> This file is 38B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Ethiopia.p
--> This file is 515B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Ethiopia.p
--> This file is 515B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Gambia.p
--> This file is 62B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Gambia.p
--> This file is 62B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Ghana.p
--> This file is 1.2kB.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Ghana.p
--> This file is 1.2kB.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Kenya.p
--> This file is 541B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Kenya.p
--> This file is 541B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Madagascar.p
--> This file is 347B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Madagascar.p
--> This file is 347B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Mali.p
--> This file is 300B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Mali.p
--> This file is 300B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Morocco.p
--> This file is 447B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Morocco.p
--> This file is 447B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Rwanda.p
--> This file is 178B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Rwanda.p
--> This file is 178B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Senegal.p
--> This file is 220B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Senegal.p
--> This file is 220B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Sierra_Leone.p
--> This file is 122B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Sierra_Leone.p
--> This file is 122B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Somalia.p
--> This file is 208B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Somalia.p
--> This file is 208B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/South_Africa.p
--> This file is 3.8kB.
Saved data to /Volumes/Piggy/Charts/data/africa/results/South_Africa.p
--> This file is 3.8kB.
Saving data to /Volumes/Piggy/Charts/data/africa/results/South_Sudan.p
--> This file is 83B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/South_Sudan.p
--> This file is 83B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Sudan.p
--> This file is 118B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Sudan.p
--> This file is 118B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Swaziland.p
--> This file is 61B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Swaziland.p
--> This file is 61B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Tanzania.p
--> This file is 575B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Tanzania.p
--> This file is 575B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Togo.p
--> This file is 49B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Togo.p
--> This file is 49B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Uganda.p
--> This file is 1.1kB.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Uganda.p
--> This file is 1.1kB.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Zambia.p
--> This file is 365B.
Saved data to /Volumes/Piggy/Charts/data/africa/results/Zambia.p
--> This file is 365B.
Saving data to /Volumes/Piggy/Charts/data/africa/results/Zimbabwe.p
%autoreload
from Africa import africaData
africa = africaData()
#africa.setDBRenames(manDB)
#africa.setMultiDBRenames(multimanDB)
africa.setChartUsage(rank=[0,1,2,3])
africa.setFullChartData()
africa.setArtistAlbumData()
africa.saveArtistAlbumData()
africa.saveFullChartData()Using Charts For Rank 0
Categories: ['south_africa']
Chart: south_africa
Using 1 Charts
Using 1 Charts For Rank 0
Using Charts For Rank 1
Categories: ['egypt']
Chart: egypt
Using 1 Charts
Using 1 Charts For Rank 1
Using Charts For Rank 2
Categories: ['kenya']
Chart: kenya
Using 1 Charts
Using 1 Charts For Rank 2
Using Charts For Rank 3
Categories: ['ethiopia', 'eritrea']
Chart: ethiopia
Using 1 Charts
Chart: eritrea
Using 1 Charts
Using 2 Charts For Rank 3
=== ChartUsage ===
Using Charts (Rank=[0, 1, 2, 3]): ['South_Africa', 'Egypt', 'Kenya', 'Ethiopia', 'Eritrea']
Found 31 summary files
Using Egypt
Using Eritrea
Using Ethiopia
Using Kenya
Using South_Africa
Renamed 0 single artists
Saving 426 Artist Album Data to currentafricaArtistAlbumData.p
Saving data to currentafricaArtistAlbumData.p
--> This file is 6.4kB.
Saved data to currentafricaArtistAlbumData.p
--> This file is 6.4kB.
Saving 426 Full Artist Data
Saving data to currentafricaFullChartArtistAlbumData.p
--> This file is 8.9kB.
Saved data to currentafricaFullChartArtistAlbumData.p
--> This file is 8.9kB.
from searchUtils import findExt
from fileUtils import getBaseFilename
names = [getBaseFilename(ifile) for ifile in findExt("/Volumes/Piggy/Charts/data/africa/results/", ".p")]
for name in names:
key = name.replace("(", "")
key = key.replace(")", "")
key = key.replace("-", "_")
print("self.{0: <50} = ['{1}']".format(key.lower(), name))self.angola = ['Angola']
self.benin = ['Benin']
self.nigeria = ['Nigeria']
self.botswana = ['Botswana']
self.cameroon = ['Cameroon']
self.cape_verde = ['Cape_Verde']
self.republic_of_the_congo_congo_brazzaville = ['Republic_of_the_Congo_(Congo-Brazzaville)']
self.democratic_republic_of_the_congo_former_zaire = ['Democratic_Republic_of_the_Congo_(former_Zaire)']
self.egypt = ['Egypt']
self.eritrea = ['Eritrea']
self.ethiopia = ['Ethiopia']
self.gambia = ['Gambia']
self.ghana = ['Ghana']
self.kenya = ['Kenya']
self.madagascar = ['Madagascar']
self.mali = ['Mali']
self.morocco = ['Morocco']
self.rwanda = ['Rwanda']
self.senegal = ['Senegal']
self.sierra_leone = ['Sierra_Leone']
self.somalia = ['Somalia']
self.south_africa = ['South_Africa']
self.south_sudan = ['South_Sudan']
self.sudan = ['Sudan']
self.swaziland = ['Swaziland']
self.tanzania = ['Tanzania']
self.togo = ['Togo']
self.uganda = ['Uganda']
self.zambia = ['Zambia']
self.zimbabwe = ['Zimbabwe']
self.soukous = ['Soukous']
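# The loop above sanitises each result filename (parentheses and hyphens removed, lower-cased)
# and prints ready-to-paste "self.<key> = ['<Name>']" assignments, presumably for the chart
# definitions in the Africa class.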
#uDisc.setDBRenames(manDB)
#uDisc.setMultiDBRenames(multimanDB)
uDisc.setChartUsage(rank=[0])
uDisc.setFullChartData()
uDisc.setArtistAlbumData()
uDisc.saveArtistAlbumData()
uDisc.saveFullChartData()_____no_output_____
</code>
| {
"repository": "tgadf/charts",
"path": "Africa.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 64948,
"hexsha": "d0ed20f2f0b4458b7d7407176b7f19a9ce03dba6",
"max_line_length": 127,
"avg_line_length": 33.7217030114,
"alphanum_fraction": 0.5684393669
} |
# Notebook from luissian/opentrons_covid19
Path: notebooks/S3/modulec_testing.ipynb
<code>
from opentrons import simulate
ctx = simulate.get_protocol_api('2.1')
NUM_SAMPLES = 48
VOLUME_MMIX = 20
ELUTION_LABWARE = '2ml tubes'
PREPARE_MASTERMIX = True
MM_TYPE = 'MM1'
EL_LW_DICT = {
'large strips': 'opentrons_96_aluminumblock_generic_pcr_strip_200ul',
'short strips': 'opentrons_96_aluminumblock_generic_pcr_strip_200ul',
'2ml tubes': 'opentrons_24_tuberack_generic_2ml_screwcap',
'1.5ml tubes': 'opentrons_24_tuberack_nest_1.5ml_screwcap'
}
C:\Users\Adm\.opentrons\deck_calibration.json not found. Loading defaults
C:\Users\Adm\.opentrons\robot_settings.json not found. Loading defaults
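# The cell above creates a simulated protocol context and fixes the run parameters: 48 samples,
# 20 (presumably microlitres) of mastermix per reaction, elution in 2 ml screwcap tubes, and a
# lookup table mapping each elution labware option to its Opentrons labware definition.
# The "not found. Loading defaults" lines are only the simulator falling back to default
# calibration settings on a machine without robot configuration files.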
source_racks = [
ctx.load_labware(EL_LW_DICT[ELUTION_LABWARE], slot,
'RNA elution labware ' + str(i+1))
for i, slot in enumerate(['4', '5', '1', '2'])
]
tips20 = [
ctx.load_labware('opentrons_96_filtertiprack_20ul', slot)
for slot in ['6', '9', '8', '7']
]
tips300 = [ctx.load_labware('opentrons_96_filtertiprack_200ul', '3')]
tempdeck = ctx.load_module('tempdeck', '10')
pcr_plate = tempdeck.load_labware(
'biorad_96_wellplate_200ul_pcr', 'PCR plate')
tempdeck.set_temperature(4)
mm_rack = ctx.load_labware(
'opentrons_24_tuberack_generic_2ml_screwcap', '11',
'2ml screw tube aluminum block for mastermix')
# pipette
p20 = ctx.load_instrument('p20_single_gen2', 'right', tip_racks=tips20)
p300 = ctx.load_instrument('p300_single_gen2', 'left', tip_racks=tips300)
_____no_output_____print(source_racks)
print(tips20)
print(tips300)
print(tempdeck)
print(pcr_plate)
print(mm_rack)[RNA elution labware 1 on 4, RNA elution labware 2 on 5, RNA elution labware 3 on 1, RNA elution labware 4 on 2]
[Opentrons 96 Filter Tip Rack 20 µL on 6, Opentrons 96 Filter Tip Rack 20 µL on 9, Opentrons 96 Filter Tip Rack 20 µL on 8, Opentrons 96 Filter Tip Rack 20 µL on 7]
[Opentrons 96 Filter Tip Rack 200 µL on 3]
TemperatureModuleContext at Temperature Module on 10 lw PCR plate on Temperature Module on 10
PCR plate on Temperature Module on 10
2ml screw tube aluminum block for mastermix on 11
# Know which class the object comes from.
mm_rack.__class__
# Know which methods are available for the object.
dir(mm_rack)_____no_output_____# Example, access wells in rack object.
mm_rack.wells()_____no_output_____sources = [
tube
for rack in source_racks for tube in rack.wells()][:NUM_SAMPLES]
print(sources)[A1 of RNA elution labware 1 on 4, B1 of RNA elution labware 1 on 4, C1 of RNA elution labware 1 on 4, D1 of RNA elution labware 1 on 4, A2 of RNA elution labware 1 on 4, B2 of RNA elution labware 1 on 4, C2 of RNA elution labware 1 on 4, D2 of RNA elution labware 1 on 4, A3 of RNA elution labware 1 on 4, B3 of RNA elution labware 1 on 4, C3 of RNA elution labware 1 on 4, D3 of RNA elution labware 1 on 4, A4 of RNA elution labware 1 on 4, B4 of RNA elution labware 1 on 4, C4 of RNA elution labware 1 on 4, D4 of RNA elution labware 1 on 4, A5 of RNA elution labware 1 on 4, B5 of RNA elution labware 1 on 4, C5 of RNA elution labware 1 on 4, D5 of RNA elution labware 1 on 4, A6 of RNA elution labware 1 on 4, B6 of RNA elution labware 1 on 4, C6 of RNA elution labware 1 on 4, D6 of RNA elution labware 1 on 4, A1 of RNA elution labware 2 on 5, B1 of RNA elution labware 2 on 5, C1 of RNA elution labware 2 on 5, D1 of RNA elution labware 2 on 5, A2 of RNA elution labware 2 on 5, B2 of RNA elution labware 2 on 5, C2 of RNA elution labware 2 on 5, D2 of RNA elution labware 2 on 5, A3 of RNA elution labware 2 on 5, B3 of RNA elution labware 2 on 5, C3 of RNA elution labware 2 on 5, D3 of RNA elution labware 2 on 5, A4 of RNA elution labware 2 on 5, B4 of RNA elution labware 2 on 5, C4 of RNA elution labware 2 on 5, D4 of RNA elution labware 2 on 5, A5 of RNA elution labware 2 on 5, B5 of RNA elution labware 2 on 5, C5 of RNA elution labware 2 on 5, D5 of RNA elution labware 2 on 5, A6 of RNA elution labware 2 on 5, B6 of RNA elution labware 2 on 5, C6 of RNA elution labware 2 on 5, D6 of RNA elution labware 2 on 5]
sources=list()
for rack in source_racks:
for tube in rack.wells():
sources.append(tube)
print(sources[:NUM_SAMPLES])[A1 of RNA elution labware 1 on 4, B1 of RNA elution labware 1 on 4, C1 of RNA elution labware 1 on 4, D1 of RNA elution labware 1 on 4, A2 of RNA elution labware 1 on 4, B2 of RNA elution labware 1 on 4, C2 of RNA elution labware 1 on 4, D2 of RNA elution labware 1 on 4, A3 of RNA elution labware 1 on 4, B3 of RNA elution labware 1 on 4, C3 of RNA elution labware 1 on 4, D3 of RNA elution labware 1 on 4, A4 of RNA elution labware 1 on 4, B4 of RNA elution labware 1 on 4, C4 of RNA elution labware 1 on 4, D4 of RNA elution labware 1 on 4, A5 of RNA elution labware 1 on 4, B5 of RNA elution labware 1 on 4, C5 of RNA elution labware 1 on 4, D5 of RNA elution labware 1 on 4, A6 of RNA elution labware 1 on 4, B6 of RNA elution labware 1 on 4, C6 of RNA elution labware 1 on 4, D6 of RNA elution labware 1 on 4, A1 of RNA elution labware 2 on 5, B1 of RNA elution labware 2 on 5, C1 of RNA elution labware 2 on 5, D1 of RNA elution labware 2 on 5, A2 of RNA elution labware 2 on 5, B2 of RNA elution labware 2 on 5, C2 of RNA elution labware 2 on 5, D2 of RNA elution labware 2 on 5, A3 of RNA elution labware 2 on 5, B3 of RNA elution labware 2 on 5, C3 of RNA elution labware 2 on 5, D3 of RNA elution labware 2 on 5, A4 of RNA elution labware 2 on 5, B4 of RNA elution labware 2 on 5, C4 of RNA elution labware 2 on 5, D4 of RNA elution labware 2 on 5, A5 of RNA elution labware 2 on 5, B5 of RNA elution labware 2 on 5, C5 of RNA elution labware 2 on 5, D5 of RNA elution labware 2 on 5, A6 of RNA elution labware 2 on 5, B6 of RNA elution labware 2 on 5, C6 of RNA elution labware 2 on 5, D6 of RNA elution labware 2 on 5]
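# The explicit nested loop gives the same result as the comprehension above: all tubes of every
# elution rack, flattened rack by rack, truncated to the first NUM_SAMPLES entries.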
dests = [
well
for h_block in range(2)
for v_block in range(2)
for col in pcr_plate.columns()[6*v_block:6*(v_block+1)]
for well in col[4*h_block:4*(h_block+1)]][:NUM_SAMPLES]
print(dests)[A1 of PCR plate on Temperature Module on 10, B1 of PCR plate on Temperature Module on 10, C1 of PCR plate on Temperature Module on 10, D1 of PCR plate on Temperature Module on 10, A2 of PCR plate on Temperature Module on 10, B2 of PCR plate on Temperature Module on 10, C2 of PCR plate on Temperature Module on 10, D2 of PCR plate on Temperature Module on 10, A3 of PCR plate on Temperature Module on 10, B3 of PCR plate on Temperature Module on 10, C3 of PCR plate on Temperature Module on 10, D3 of PCR plate on Temperature Module on 10, A4 of PCR plate on Temperature Module on 10, B4 of PCR plate on Temperature Module on 10, C4 of PCR plate on Temperature Module on 10, D4 of PCR plate on Temperature Module on 10, A5 of PCR plate on Temperature Module on 10, B5 of PCR plate on Temperature Module on 10, C5 of PCR plate on Temperature Module on 10, D5 of PCR plate on Temperature Module on 10, A6 of PCR plate on Temperature Module on 10, B6 of PCR plate on Temperature Module on 10, C6 of PCR plate on Temperature Module on 10, D6 of PCR plate on Temperature Module on 10, A7 of PCR plate on Temperature Module on 10, B7 of PCR plate on Temperature Module on 10, C7 of PCR plate on Temperature Module on 10, D7 of PCR plate on Temperature Module on 10, A8 of PCR plate on Temperature Module on 10, B8 of PCR plate on Temperature Module on 10, C8 of PCR plate on Temperature Module on 10, D8 of PCR plate on Temperature Module on 10, A9 of PCR plate on Temperature Module on 10, B9 of PCR plate on Temperature Module on 10, C9 of PCR plate on Temperature Module on 10, D9 of PCR plate on Temperature Module on 10, A10 of PCR plate on Temperature Module on 10, B10 of PCR plate on Temperature Module on 10, C10 of PCR plate on Temperature Module on 10, D10 of PCR plate on Temperature Module on 10, A11 of PCR plate on Temperature Module on 10, B11 of PCR plate on Temperature Module on 10, C11 of PCR plate on Temperature Module on 10, D11 of PCR plate on Temperature Module on 10, A12 of PCR plate on Temperature Module on 10, B12 of PCR plate on Temperature Module on 10, C12 of PCR plate on Temperature Module on 10, D12 of PCR plate on Temperature Module on 10]
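# Destination ordering: the comprehension fills the PCR plate quadrant by quadrant
# (rows A-D of columns 1-6, then rows A-D of columns 7-12, then rows E-H of columns 1-6,
# then rows E-H of columns 7-12), going down each column within a quadrant. With
# NUM_SAMPLES = 48 only the first two quadrants (rows A-D) are used, as the printout shows.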
dests = list()
for h_block in range(2):
print("hblock = " + str(h_block))
for v_block in range(2):
print("vblock = " + str(v_block))
for col in pcr_plate.columns()[6*v_block:6*(v_block+1)]:
print("col = " + str(col))
for well in col[4*h_block:4*(h_block+1)]:
print(well)
dests.append(well)
dests = dests[:NUM_SAMPLES] hblock = 0
vblock = 0
col = [A1 of PCR plate on Temperature Module on 10, B1 of PCR plate on Temperature Module on 10, C1 of PCR plate on Temperature Module on 10, D1 of PCR plate on Temperature Module on 10, E1 of PCR plate on Temperature Module on 10, F1 of PCR plate on Temperature Module on 10, G1 of PCR plate on Temperature Module on 10, H1 of PCR plate on Temperature Module on 10]
A1 of PCR plate on Temperature Module on 10
B1 of PCR plate on Temperature Module on 10
C1 of PCR plate on Temperature Module on 10
D1 of PCR plate on Temperature Module on 10
col = [A2 of PCR plate on Temperature Module on 10, B2 of PCR plate on Temperature Module on 10, C2 of PCR plate on Temperature Module on 10, D2 of PCR plate on Temperature Module on 10, E2 of PCR plate on Temperature Module on 10, F2 of PCR plate on Temperature Module on 10, G2 of PCR plate on Temperature Module on 10, H2 of PCR plate on Temperature Module on 10]
A2 of PCR plate on Temperature Module on 10
B2 of PCR plate on Temperature Module on 10
C2 of PCR plate on Temperature Module on 10
D2 of PCR plate on Temperature Module on 10
col = [A3 of PCR plate on Temperature Module on 10, B3 of PCR plate on Temperature Module on 10, C3 of PCR plate on Temperature Module on 10, D3 of PCR plate on Temperature Module on 10, E3 of PCR plate on Temperature Module on 10, F3 of PCR plate on Temperature Module on 10, G3 of PCR plate on Temperature Module on 10, H3 of PCR plate on Temperature Module on 10]
A3 of PCR plate on Temperature Module on 10
B3 of PCR plate on Temperature Module on 10
C3 of PCR plate on Temperature Module on 10
D3 of PCR plate on Temperature Module on 10
col = [A4 of PCR plate on Temperature Module on 10, B4 of PCR plate on Temperature Module on 10, C4 of PCR plate on Temperature Module on 10, D4 of PCR plate on Temperature Module on 10, E4 of PCR plate on Temperature Module on 10, F4 of PCR plate on Temperature Module on 10, G4 of PCR plate on Temperature Module on 10, H4 of PCR plate on Temperature Module on 10]
A4 of PCR plate on Temperature Module on 10
B4 of PCR plate on Temperature Module on 10
C4 of PCR plate on Temperature Module on 10
D4 of PCR plate on Temperature Module on 10
col = [A5 of PCR plate on Temperature Module on 10, B5 of PCR plate on Temperature Module on 10, C5 of PCR plate on Temperature Module on 10, D5 of PCR plate on Temperature Module on 10, E5 of PCR plate on Temperature Module on 10, F5 of PCR plate on Temperature Module on 10, G5 of PCR plate on Temperature Module on 10, H5 of PCR plate on Temperature Module on 10]
A5 of PCR plate on Temperature Module on 10
B5 of PCR plate on Temperature Module on 10
C5 of PCR plate on Temperature Module on 10
D5 of PCR plate on Temperature Module on 10
col = [A6 of PCR plate on Temperature Module on 10, B6 of PCR plate on Temperature Module on 10, C6 of PCR plate on Temperature Module on 10, D6 of PCR plate on Temperature Module on 10, E6 of PCR plate on Temperature Module on 10, F6 of PCR plate on Temperature Module on 10, G6 of PCR plate on Temperature Module on 10, H6 of PCR plate on Temperature Module on 10]
A6 of PCR plate on Temperature Module on 10
B6 of PCR plate on Temperature Module on 10
C6 of PCR plate on Temperature Module on 10
D6 of PCR plate on Temperature Module on 10
vblock = 1
col = [A7 of PCR plate on Temperature Module on 10, B7 of PCR plate on Temperature Module on 10, C7 of PCR plate on Temperature Module on 10, D7 of PCR plate on Temperature Module on 10, E7 of PCR plate on Temperature Module on 10, F7 of PCR plate on Temperature Module on 10, G7 of PCR plate on Temperature Module on 10, H7 of PCR plate on Temperature Module on 10]
A7 of PCR plate on Temperature Module on 10
B7 of PCR plate on Temperature Module on 10
C7 of PCR plate on Temperature Module on 10
D7 of PCR plate on Temperature Module on 10
col = [A8 of PCR plate on Temperature Module on 10, B8 of PCR plate on Temperature Module on 10, C8 of PCR plate on Temperature Module on 10, D8 of PCR plate on Temperature Module on 10, E8 of PCR plate on Temperature Module on 10, F8 of PCR plate on Temperature Module on 10, G8 of PCR plate on Temperature Module on 10, H8 of PCR plate on Temperature Module on 10]
A8 of PCR plate on Temperature Module on 10
B8 of PCR plate on Temperature Module on 10
C8 of PCR plate on Temperature Module on 10
D8 of PCR plate on Temperature Module on 10
col = [A9 of PCR plate on Temperature Module on 10, B9 of PCR plate on Temperature Module on 10, C9 of PCR plate on Temperature Module on 10, D9 of PCR plate on Temperature Module on 10, E9 of PCR plate on Temperature Module on 10, F9 of PCR plate on Temperature Module on 10, G9 of PCR plate on Temperature Module on 10, H9 of PCR plate on Temperature Module on 10]
A9 of PCR plate on Temperature Module on 10
B9 of PCR plate on Temperature Module on 10
C9 of PCR plate on Temperature Module on 10
D9 of PCR plate on Temperature Module on 10
col = [A10 of PCR plate on Temperature Module on 10, B10 of PCR plate on Temperature Module on 10, C10 of PCR plate on Temperature Module on 10, D10 of PCR plate on Temperature Module on 10, E10 of PCR plate on Temperature Module on 10, F10 of PCR plate on Temperature Module on 10, G10 of PCR plate on Temperature Module on 10, H10 of PCR plate on Temperature Module on 10]
A10 of PCR plate on Temperature Module on 10
B10 of PCR plate on Temperature Module on 10
C10 of PCR plate on Temperature Module on 10
D10 of PCR plate on Temperature Module on 10
col = [A11 of PCR plate on Temperature Module on 10, B11 of PCR plate on Temperature Module on 10, C11 of PCR plate on Temperature Module on 10, D11 of PCR plate on Temperature Module on 10, E11 of PCR plate on Temperature Module on 10, F11 of PCR plate on Temperature Module on 10, G11 of PCR plate on Temperature Module on 10, H11 of PCR plate on Temperature Module on 10]
A11 of PCR plate on Temperature Module on 10
B11 of PCR plate on Temperature Module on 10
C11 of PCR plate on Temperature Module on 10
D11 of PCR plate on Temperature Module on 10
col = [A12 of PCR plate on Temperature Module on 10, B12 of PCR plate on Temperature Module on 10, C12 of PCR plate on Temperature Module on 10, D12 of PCR plate on Temperature Module on 10, E12 of PCR plate on Temperature Module on 10, F12 of PCR plate on Temperature Module on 10, G12 of PCR plate on Temperature Module on 10, H12 of PCR plate on Temperature Module on 10]
A12 of PCR plate on Temperature Module on 10
B12 of PCR plate on Temperature Module on 10
C12 of PCR plate on Temperature Module on 10
D12 of PCR plate on Temperature Module on 10
hblock = 1
vblock = 0
col = [A1 of PCR plate on Temperature Module on 10, B1 of PCR plate on Temperature Module on 10, C1 of PCR plate on Temperature Module on 10, D1 of PCR plate on Temperature Module on 10, E1 of PCR plate on Temperature Module on 10, F1 of PCR plate on Temperature Module on 10, G1 of PCR plate on Temperature Module on 10, H1 of PCR plate on Temperature Module on 10]
E1 of PCR plate on Temperature Module on 10
F1 of PCR plate on Temperature Module on 10
G1 of PCR plate on Temperature Module on 10
H1 of PCR plate on Temperature Module on 10
col = [A2 of PCR plate on Temperature Module on 10, B2 of PCR plate on Temperature Module on 10, C2 of PCR plate on Temperature Module on 10, D2 of PCR plate on Temperature Module on 10, E2 of PCR plate on Temperature Module on 10, F2 of PCR plate on Temperature Module on 10, G2 of PCR plate on Temperature Module on 10, H2 of PCR plate on Temperature Module on 10]
E2 of PCR plate on Temperature Module on 10
F2 of PCR plate on Temperature Module on 10
G2 of PCR plate on Temperature Module on 10
H2 of PCR plate on Temperature Module on 10
col = [A3 of PCR plate on Temperature Module on 10, B3 of PCR plate on Temperature Module on 10, C3 of PCR plate on Temperature Module on 10, D3 of PCR plate on Temperature Module on 10, E3 of PCR plate on Temperature Module on 10, F3 of PCR plate on Temperature Module on 10, G3 of PCR plate on Temperature Module on 10, H3 of PCR plate on Temperature Module on 10]
E3 of PCR plate on Temperature Module on 10
F3 of PCR plate on Temperature Module on 10
G3 of PCR plate on Temperature Module on 10
H3 of PCR plate on Temperature Module on 10
col = [A4 of PCR plate on Temperature Module on 10, B4 of PCR plate on Temperature Module on 10, C4 of PCR plate on Temperature Module on 10, D4 of PCR plate on Temperature Module on 10, E4 of PCR plate on Temperature Module on 10, F4 of PCR plate on Temperature Module on 10, G4 of PCR plate on Temperature Module on 10, H4 of PCR plate on Temperature Module on 10]
E4 of PCR plate on Temperature Module on 10
F4 of PCR plate on Temperature Module on 10
G4 of PCR plate on Temperature Module on 10
H4 of PCR plate on Temperature Module on 10
col = [A5 of PCR plate on Temperature Module on 10, B5 of PCR plate on Temperature Module on 10, C5 of PCR plate on Temperature Module on 10, D5 of PCR plate on Temperature Module on 10, E5 of PCR plate on Temperature Module on 10, F5 of PCR plate on Temperature Module on 10, G5 of PCR plate on Temperature Module on 10, H5 of PCR plate on Temperature Module on 10]
E5 of PCR plate on Temperature Module on 10
F5 of PCR plate on Temperature Module on 10
G5 of PCR plate on Temperature Module on 10
H5 of PCR plate on Temperature Module on 10
col = [A6 of PCR plate on Temperature Module on 10, B6 of PCR plate on Temperature Module on 10, C6 of PCR plate on Temperature Module on 10, D6 of PCR plate on Temperature Module on 10, E6 of PCR plate on Temperature Module on 10, F6 of PCR plate on Temperature Module on 10, G6 of PCR plate on Temperature Module on 10, H6 of PCR plate on Temperature Module on 10]
E6 of PCR plate on Temperature Module on 10
F6 of PCR plate on Temperature Module on 10
G6 of PCR plate on Temperature Module on 10
H6 of PCR plate on Temperature Module on 10
vblock = 1
col = [A7 of PCR plate on Temperature Module on 10, B7 of PCR plate on Temperature Module on 10, C7 of PCR plate on Temperature Module on 10, D7 of PCR plate on Temperature Module on 10, E7 of PCR plate on Temperature Module on 10, F7 of PCR plate on Temperature Module on 10, G7 of PCR plate on Temperature Module on 10, H7 of PCR plate on Temperature Module on 10]
E7 of PCR plate on Temperature Module on 10
F7 of PCR plate on Temperature Module on 10
G7 of PCR plate on Temperature Module on 10
H7 of PCR plate on Temperature Module on 10
col = [A8 of PCR plate on Temperature Module on 10, B8 of PCR plate on Temperature Module on 10, C8 of PCR plate on Temperature Module on 10, D8 of PCR plate on Temperature Module on 10, E8 of PCR plate on Temperature Module on 10, F8 of PCR plate on Temperature Module on 10, G8 of PCR plate on Temperature Module on 10, H8 of PCR plate on Temperature Module on 10]
E8 of PCR plate on Temperature Module on 10
F8 of PCR plate on Temperature Module on 10
G8 of PCR plate on Temperature Module on 10
H8 of PCR plate on Temperature Module on 10
col = [A9 of PCR plate on Temperature Module on 10, B9 of PCR plate on Temperature Module on 10, C9 of PCR plate on Temperature Module on 10, D9 of PCR plate on Temperature Module on 10, E9 of PCR plate on Temperature Module on 10, F9 of PCR plate on Temperature Module on 10, G9 of PCR plate on Temperature Module on 10, H9 of PCR plate on Temperature Module on 10]
E9 of PCR plate on Temperature Module on 10
F9 of PCR plate on Temperature Module on 10
G9 of PCR plate on Temperature Module on 10
H9 of PCR plate on Temperature Module on 10
col = [A10 of PCR plate on Temperature Module on 10, B10 of PCR plate on Temperature Module on 10, C10 of PCR plate on Temperature Module on 10, D10 of PCR plate on Temperature Module on 10, E10 of PCR plate on Temperature Module on 10, F10 of PCR plate on Temperature Module on 10, G10 of PCR plate on Temperature Module on 10, H10 of PCR plate on Temperature Module on 10]
E10 of PCR plate on Temperature Module on 10
F10 of PCR plate on Temperature Module on 10
G10 of PCR plate on Temperature Module on 10
H10 of PCR plate on Temperature Module on 10
col = [A11 of PCR plate on Temperature Module on 10, B11 of PCR plate on Temperature Module on 10, C11 of PCR plate on Temperature Module on 10, D11 of PCR plate on Temperature Module on 10, E11 of PCR plate on Temperature Module on 10, F11 of PCR plate on Temperature Module on 10, G11 of PCR plate on Temperature Module on 10, H11 of PCR plate on Temperature Module on 10]
E11 of PCR plate on Temperature Module on 10
F11 of PCR plate on Temperature Module on 10
G11 of PCR plate on Temperature Module on 10
H11 of PCR plate on Temperature Module on 10
col = [A12 of PCR plate on Temperature Module on 10, B12 of PCR plate on Temperature Module on 10, C12 of PCR plate on Temperature Module on 10, D12 of PCR plate on Temperature Module on 10, E12 of PCR plate on Temperature Module on 10, F12 of PCR plate on Temperature Module on 10, G12 of PCR plate on Temperature Module on 10, H12 of PCR plate on Temperature Module on 10]
E12 of PCR plate on Temperature Module on 10
F12 of PCR plate on Temperature Module on 10
G12 of PCR plate on Temperature Module on 10
H12 of PCR plate on Temperature Module on 10
max_trans_per_asp = 8
#print(max_trans_per_asp)
split_ind = [ind for ind in range(0, NUM_SAMPLES, max_trans_per_asp)]
dest_sets = [dests[split_ind[i]:split_ind[i+1]]
for i in range(len(split_ind)-1)] + [dests[split_ind[-1]:]]
dest_sets_____no_output_____
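# The slicing above batches `dests` into groups of at most max_trans_per_asp (8) destination
# wells, presumably so a single mastermix aspiration can serve up to 8 dispenses. A more compact
# equivalent (a hypothetical rewrite, not part of the original protocol) would be:
#     dest_sets = [dests[i:i + max_trans_per_asp]
#                  for i in range(0, len(dests), max_trans_per_asp)]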
</code>
| {
"repository": "luissian/opentrons_covid19",
"path": "notebooks/S3/modulec_testing.ipynb",
"matched_keywords": [
"RNA"
],
"stars": null,
"size": 33967,
"hexsha": "d0edd81ebd277080527e53da3f5a2dcb13c60b88",
"max_line_length": 2182,
"avg_line_length": 59.6959578207,
"alphanum_fraction": 0.6486884329
} |
# Notebook from krassowski/meningitis-integration
Path: analyses/notes/Limma_expects_log_transformed_data.ipynb
<code>
from helpers.utilities import *
%run helpers/notebook_setup.ipynb_____no_output_____
</code>
While attempting to compare limma's results for log-transformed and non-transformed data, it was noticed (and brought up by Dr Tim) that the values of logFC produced by limma for non-transformed data are of the wrong order of magnitude._____no_output_____I have investigated the issue, following the limma calculations for non-transformed data step by step:_____no_output_____
<code>
indexed_by_target_path = 'data/clean/protein/indexed_by_target.csv'
clinical_path = 'data/clean/protein/clinical_data_ordered_to_match_proteins_matrix.csv'_____no_output_____clinical = read_csv(clinical_path, index_col=0)
raw_protein_matrix = read_csv(indexed_by_target_path, index_col=0)_____no_output_____by_condition = clinical.Meningitis_____no_output_____tb_lysozyme = raw_protein_matrix[
raw_protein_matrix.columns[by_condition == 'Tuberculosis']
].loc['Lysozyme'].mean()_____no_output_____hc_lysozyme = raw_protein_matrix[
raw_protein_matrix.columns[by_condition == 'Healthy control']
].loc['Lysozyme'].mean()_____no_output_____tb_lysozyme / hc_lysozyme_____no_output_____tb_lysozyme_____no_output_____hc_lysozyme_____no_output_____
</code>
While for the transformed data:_____no_output_____
<code>
from numpy import log10_____no_output_____log10(tb_lysozyme)_____no_output_____log10(hc_lysozyme)_____no_output_____log10(tb_lysozyme) / log10(hc_lysozyme)_____no_output_____protein_matrix = raw_protein_matrix.apply(log10)_____no_output_____%%R -i protein_matrix -i by_condition
import::here(space_to_dot, dot_to_space, .from='helpers/utilities.R')
import::here(
limma_fit, limma_diff_ebayes, full_table,
design_from_conditions, calculate_means,
.from='helpers/differential_expression.R'
)
diff_ebayes = function(a, b, data=protein_matrix, conditions_vector=by_condition, ...) {
limma_diff_ebayes(a, b, data=data, conditions_vector=conditions_vector, ...)
}_____no_output_____%%R -o tb_all_proteins_raw -i raw_protein_matrix
result = diff_ebayes('Tuberculosis', 'Healthy control', data=raw_protein_matrix)
tb_all_proteins_raw = full_table(result)_____no_output_____%%R
head(full_table(result, coef=1)) logFC AveExpr t P.Value
Lysozyme 61798.20 67997.26 12.34222 3.414236e-20
TIMP-1 65320.26 89128.78 11.82749 3.121111e-19
IGFBP-4 104840.02 186800.74 11.56193 9.882769e-19
C3d 124850.49 99248.92 11.43494 1.719287e-18
Cyclophilin A 130136.76 117191.29 11.15072 5.970601e-18
14-3-3 protein zeta/delta 141689.40 105857.89 10.58352 7.404860e-17
adj.P.Val B protein
Lysozyme 4.455578e-17 -4.254329 Lysozyme
TIMP-1 2.036525e-16 -4.264678 TIMP-1
IGFBP-4 4.299004e-16 -4.270296 IGFBP-4
C3d 5.609172e-16 -4.273051 C3d
Cyclophilin A 1.558327e-15 -4.279385 Cyclophilin A
14-3-3 protein zeta/delta 1.509659e-14 -4.292907 14-3-3 protein zeta/delta
%%R
# logFC is taken from the coefficient of fit (result):
# it seems that the coefficients do not represent the FC as would be expected...
result$coefficients['Lysozyme', ][1] 61798.2
</code>
We can trace it back to:_____no_output_____
<code>
%%R
fit = limma_fit(
data=raw_protein_matrix, conditions_vector=by_condition,
a='Tuberculosis', b='Healthy control'
)_____no_output_____%%R
fit$coefficients['Lysozyme', ][1] 61798.2
</code>
It changes when using only the data from TB and HC, though it continues to produce large values:_____no_output_____
<code>
%%R
fit = limma_fit(
data=raw_protein_matrix, conditions_vector=by_condition,
a='Tuberculosis', b='Healthy control', use_all=F
)_____no_output_____%%R
fit$coefficients['Lysozyme', ]Intercept Group
59749.21 30899.10
</code>
Getting back to the previous version, we can see that the means are correctly calculated:_____no_output_____
<code>
%%R
design <- design_from_conditions(by_condition)
fit <- calculate_means(raw_protein_matrix, design)_____no_output_____%%R
fit$coefficients['Lysozyme', ] (Intercept) Healthy.control Tuberculosis Viral
84617.54 -55767.43 6030.77 -17925.30
tb_lysozyme, hc_lysozyme_____no_output_____%%R
contrast_specification <- paste(
space_to_dot('Tuberculosis'),
space_to_dot('Healthy control'),
sep='-'
)
contrast.matrix <- limma::makeContrasts(contrasts=contrast_specification, levels=design)
contrast.matrix Contrasts
Levels Tuberculosis-Healthy.control
Intercept 0
Healthy.control -1
Tuberculosis 1
Viral 0
</code>
There is only one more step:
> fit <- limma::contrasts.fit(fit, contrast.matrix)
so the problem must be here_____no_output_____
<code>
%%R
fit_contrasted <- limma::contrasts.fit(fit, contrast.matrix)
fit_contrasted$coefficients['Lysozyme', ][1] 61798.2
</code>
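To make the contrast step explicit (illustrative arithmetic only, using the coefficients and contrast matrix printed above): `contrasts.fit` combines the means-fit coefficients with the contrast weights (0, −1, 1, 0), i.e. Tuberculosis minus Healthy.control.

```python
# Illustrative arithmetic only -- coefficients copied from the means fit printed above.
intercept, healthy_control, tuberculosis, viral = 84617.54, -55767.43, 6030.77, -17925.30
print(tuberculosis - healthy_control)  # ~61798.2: a difference of group means, not a ratio
```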
Note the result we got: 61798.20 is:_____no_output_____
<code>
tb_lysozyme - hc_lysozyme_____no_output_____%%R
final_fit = limma::eBayes(fit_contrasted, trend=T, robust=T)
final_fit$coefficients['Lysozyme', ][1] 61798.2
</code>
This shows that limma does not produce the fold change at all._____no_output_____This is because limma assumes that the data are log-transformed upfront. **If we gave it log-transformed data, the difference of logs would be equivalent to the log of the ratio, i.e. a proper log fold change.**_____no_output_____
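A minimal numeric illustration of that last point (plain Python; the two values below are hypothetical raw-scale group means, not taken from this dataset):

```python
import numpy as np

a, b = 90000.0, 29000.0           # hypothetical raw-scale group means
print(np.log10(a) - np.log10(b))  # difference of logs...
print(np.log10(a / b))            # ...equals the log of the ratio, i.e. the log fold change
```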
| {
"repository": "krassowski/meningitis-integration",
"path": "analyses/notes/Limma_expects_log_transformed_data.ipynb",
"matched_keywords": [
"limma"
],
"stars": null,
"size": 12830,
"hexsha": "d0f131baaa00107986feeb5b07ea5f5bdbd8988d",
"max_line_length": 237,
"avg_line_length": 21.9691780822,
"alphanum_fraction": 0.5057677319
} |
# Notebook from classofai/Route-Planner
Path: project_notebook.ipynb
# Implementing a Route Planner
In this project you will use A\* search to implement a "Google-maps" style route planning algorithm._____no_output_____## The Map_____no_output_____
<code>
# Run this cell first!
from helpers import Map, load_map_10, load_map_40, show_map
import math
%load_ext autoreload
%autoreload 2_____no_output_____
</code>
### Map Basics_____no_output_____
<code>
map_10 = load_map_10()
show_map(map_10)_____no_output_____
</code>
The map above (run the code cell if you don't see it) shows a disconnected network of 10 intersections. The two intersections on the left are connected to each other but they are not connected to the rest of the road network. This map is quite literal in its expression of distance and connectivity. On the graph above, the edge between 2 nodes (intersections) represents a literal straight road, not just an abstract connection of 2 cities.
These `Map` objects have two properties you will want to use to implement A\* search: `intersections` and `roads`
**Intersections**
The `intersections` are represented as a dictionary.
In this example, there are 10 intersections, each identified by an x,y coordinate. The coordinates are listed below. You can hover over each dot in the map above to see the intersection number._____no_output_____map_10.intersections_____no_output_____**Roads**
The `roads` property is a list where `roads[i]` contains a list of the intersections that intersection `i` connects to._____no_output_____
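As a minimal sketch of the assumed layout of these two properties (the coordinates and connections below are made up; the real values come from `load_map_10()`):

```python
# Hypothetical illustration of the Map data layout described above.
intersections = {0: [0.78, 0.49], 1: [0.29, 0.22], 2: [0.51, 0.91]}  # node id -> [x, y]
roads = [[1, 2], [0], [0]]  # roads[i] lists the intersections that intersection i connects to
```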
<code>
# this shows that intersection 0 connects to intersections 7, 6, and 5
map_10.roads[0] _____no_output_____# This shows the full connectivity of the map
map_10.roads_____no_output_____# map_40 is a bigger map than map_10
map_40 = load_map_40()
show_map(map_40)_____no_output_____
</code>
### Advanced Visualizations
The map above shows a network of roads which spans 40 different intersections (labeled 0 through 39).
The `show_map` function which generated this map also takes a few optional parameters which might be useful for visualizing the output of the search algorithm you will write.
* `start` - The "start" node for the search algorithm.
* `goal` - The "goal" node.
* `path` - An array of integers which corresponds to a valid sequence of intersection visits on the map._____no_output_____
<code>
# run this code, note the effect of including the optional
# parameters in the function call.
show_map(map_40, start=5, goal=34, path=[5,16,37,12,34])_____no_output_____
</code>
## The Algorithm
### Writing your algorithm
The algorithm written will be responsible for generating a `path` like the one passed into `show_map` above. In fact, when called with the same map, start and goal as above, your algorithm should produce the path `[5, 16, 37, 12, 34]`. However, you must complete several methods before it will work.
```bash
> PathPlanner(map_40, 5, 34).path
[5, 16, 37, 12, 34]
```_____no_output_____
<code>
# Do not change this cell
# When you write your methods correctly this cell will execute
# without problems
class PathPlanner():
"""Construct a PathPlanner Object"""
def __init__(self, M, start=None, goal=None):
""" """
self.map = M
self.start= start
self.goal = goal
self.closedSet = self.create_closedSet() if goal != None and start != None else None
self.openSet = self.create_openSet() if goal != None and start != None else None
self.cameFrom = self.create_cameFrom() if goal != None and start != None else None
self.gScore = self.create_gScore() if goal != None and start != None else None
self.fScore = self.create_fScore() if goal != None and start != None else None
self.path = self.run_search() if self.map and self.start != None and self.goal != None else None
def get_path(self):
""" Reconstructs path after search """
if self.path:
return self.path
else :
self.run_search()
return self.path
def reconstruct_path(self, current):
""" Reconstructs path after search """
total_path = [current]
while current in self.cameFrom.keys():
current = self.cameFrom[current]
total_path.append(current)
return total_path
def _reset(self):
"""Private method used to reset the closedSet, openSet, cameFrom, gScore, fScore, and path attributes"""
self.closedSet = None
self.openSet = None
self.cameFrom = None
self.gScore = None
self.fScore = None
self.path = self.run_search() if self.map and self.start and self.goal else None
def run_search(self):
""" """
if self.map == None:
raise(ValueError, "Must create map before running search. Try running PathPlanner.set_map(start_node)")
if self.goal == None:
raise(ValueError, "Must create goal node before running search. Try running PathPlanner.set_goal(start_node)")
if self.start == None:
raise(ValueError, "Must create start node before running search. Try running PathPlanner.set_start(start_node)")
self.closedSet = self.closedSet if self.closedSet != None else self.create_closedSet()
self.openSet = self.openSet if self.openSet != None else self.create_openSet()
self.cameFrom = self.cameFrom if self.cameFrom != None else self.create_cameFrom()
self.gScore = self.gScore if self.gScore != None else self.create_gScore()
self.fScore = self.fScore if self.fScore != None else self.create_fScore()
while not self.is_open_empty():
current = self.get_current_node()
if current == self.goal:
self.path = [x for x in reversed(self.reconstruct_path(current))]
return self.path
else:
self.openSet.remove(current)
self.closedSet.add(current)
for neighbor in self.get_neighbors(current):
if neighbor in self.closedSet:
continue # Ignore the neighbor which is already evaluated.
if not neighbor in self.openSet: # Discover a new node
self.openSet.add(neighbor)
# The distance from start to a neighbor
#the "dist_between" function may vary as per the solution requirements.
if self.get_tenative_gScore(current, neighbor) >= self.get_gScore(neighbor):
continue # This is not a better path.
# This path is the best until now. Record it!
self.record_best_path_to(current, neighbor)
print("No Path Found")
self.path = None
return False_____no_output_____
</code>
Create the following methods:_____no_output_____
<code>
def create_closedSet(self):
""" Creates and returns a data structure suitable to hold the set of nodes already evaluated"""
# TODO: return a data structure suitable to hold the set of nodes already evaluated
return set()_____no_output_____def create_openSet(self):
""" Creates and returns a data structure suitable to hold the set of currently discovered nodes
that are not evaluated yet. Initially, only the start node is known."""
if self.start != None:
# TODO: return a data structure suitable to hold the set of currently discovered nodes
# that are not evaluated yet. Make sure to include the start node.
self.open_set = set()
self.open_set.add(self.start)
return self.open_set
raise(ValueError, "Must create start node before creating an open set. Try running PathPlanner.set_start(start_node)")_____no_output_____def create_cameFrom(self):
"""Creates and returns a data structure that shows which node can most efficiently be reached from another,
for each node."""
# TODO: return a data structure that shows which node can most efficiently be reached from another,
# for each node.
self.come_from = {}
return self.come_from_____no_output_____def create_gScore(self):
"""Creates and returns a data structure that holds the cost of getting from the start node to that node, for each node.
The cost of going from start to start is zero."""
# TODO: a data structure that holds the cost of getting from the start node to that node, for each node.
# for each node. The cost of going from start to start is zero. The rest of the node's values should be set to infinity.
self.gScore = {}
nodes_index = len(self.map.roads)
for node in range(nodes_index):
if node == self.start:
self.gScore[node] = 0
continue
self.gScore[node] = math.inf
return self.gScore _____no_output_____def create_fScore(self):
"""Creates and returns a data structure that holds the total cost of getting from the start node to the goal
by passing by that node, for each node. That value is partly known, partly heuristic.
For the first node, that value is completely heuristic."""
# TODO: a data structure that holds the total cost of getting from the start node to the goal
# by passing by that node, for each node. That value is partly known, partly heuristic.
# For the first node, that value is completely heuristic. The rest of the node's value should be
# set to infinity.
self.fScore = {}
nodes_index = len(self.map.roads)
for node in range(nodes_index):
if node == self.start:
self.fScore[node] = heuristic_cost_estimate(self, self.start)
continue
self.fScore[node] = math.inf
return self.fScore
_____no_output_____def set_map(self, M):
"""Method used to set map attribute """
    self._reset()
self.start = None
self.goal = None
# TODO: Set map to new value.
self.map = M_____no_output_____def set_start(self, start):
"""Method used to set start attribute """
    self._reset()
# TODO: Set start value. Remember to remove goal, closedSet, openSet, cameFrom, gScore, fScore,
# and path attributes' values.
self.start = start_____no_output_____def set_goal(self, goal):
"""Method used to set goal attribute """
    self._reset()
# TODO: Set goal value.
self.goal = goal_____no_output_____def get_current_node(self):
""" Returns the node in the open set with the lowest value of f(node)."""
# TODO: Return the node in the open set with the lowest value of f(node).
current_node = {}
for node in self.open_set:
if node in self.fScore.keys():
calculate_fscore(self, node)
current_node[node] = self.fScore[node]
current = min(current_node, key=current_node.get)
return current _____no_output_____def get_neighbors(self, node):
"""Returns the neighbors of a node"""
# TODO: Return the neighbors of a node
return self.map.roads[node]_____no_output_____def get_gScore(self, node):
"""Returns the g Score of a node"""
# TODO: Return the g Score of a node
return self.gScore[node]_____no_output_____def get_tenative_gScore(self, current, neighbor):
"""Returns the tenative g Score of a node"""
# TODO: Return the g Score of the current node
# plus distance from the current node to it's neighbors
tenative_gScore = self.gScore[current] + distance(self, current, neighbor)
return tenative_gScore
_____no_output_____def is_open_empty(self):
"""returns True if the open set is empty. False otherwise. """
# TODO: Return True if the open set is empty. False otherwise.
return len(self.open_set) == 0_____no_output_____def distance(self, node_1, node_2):
""" Computes the Euclidean L2 Distance"""
# TODO: Compute and return the Euclidean L2 Distance
x1, y1 = self.map.intersections[node_1]
x2, y2 = self.map.intersections[node_2]
euclidian_dist = math.sqrt( pow((x2-x1),2) + pow((y2-y1),2))
return euclidian_dist_____no_output_____def heuristic_cost_estimate(self, node):
""" Returns the heuristic cost estimate of a node """
# TODO: Return the heuristic cost estimate of a node
x1, y1 = self.map.intersections[node]
x2, y2 = self.map.intersections[self.goal]
heuristic_cost_node = math.sqrt( pow((x2-x1),2) + pow((y2-y1),2))
return heuristic_cost_node_____no_output_____def calculate_fscore(self, node):
"""Calculate the f score of a node. """
# TODO: Calculate and returns the f score of a node.
# REMEMBER F = G + H
self.gScore[node] = get_gScore(self, node)
self.fScore[node] = self.gScore[node] + heuristic_cost_estimate(self, node)
return self.fScore
_____no_output_____def record_best_path_to(self, current, neighbor):
"""Record the best path to a node """
# TODO: Record the best path to a node, by updating cameFrom, gScore, and fScore
self.come_from[neighbor] = current
self.gScore[neighbor] = get_tenative_gScore(self, current, neighbor)
self.fScore[neighbor] = self.gScore[neighbor] + heuristic_cost_estimate(self, neighbor)
_____no_output_____PathPlanner.create_closedSet = create_closedSet
PathPlanner.create_openSet = create_openSet
PathPlanner.create_cameFrom = create_cameFrom
PathPlanner.create_gScore = create_gScore
PathPlanner.create_fScore = create_fScore
#PathPlanner._reset = _reset
PathPlanner.set_map = set_map
PathPlanner.set_start = set_start
PathPlanner.set_goal = set_goal
PathPlanner.get_current_node = get_current_node
PathPlanner.get_neighbors = get_neighbors
PathPlanner.get_gScore = get_gScore
PathPlanner.get_tenative_gScore = get_tenative_gScore
PathPlanner.is_open_empty = is_open_empty
PathPlanner.distance = distance
PathPlanner.heuristic_cost_estimate = heuristic_cost_estimate
PathPlanner.calculate_fscore = calculate_fscore
PathPlanner.record_best_path_to = record_best_path_to_____no_output_____planner = PathPlanner(map_40, 5, 34)
path = planner.path
if path == [5, 16, 37, 12, 34]:
print("great! Your code works for these inputs!")
else:
print("something is off, your code produced the following:")
print(path)great! Your code works for these inputs!
</code>
### Testing your Code
If the code below produces no errors, your algorithm is behaving correctly. You are almost ready to submit! Before you submit, go through the following submission checklist:
**Submission Checklist**
1. Does my code pass all tests?
2. Does my code implement `A*` search and not some other search algorithm?
3. Do I use an **admissible heuristic** to direct search efforts towards the goal?
4. Do I use data structures which avoid unnecessarily slow lookups?
When you can answer "yes" to all of these questions, submit by pressing the Submit button in the lower right!_____no_output_____
<code>
from test import test
test(PathPlanner)All tests pass! Congratulations!
</code>
## Questions
**Instructions** Answer the following questions in your own words. We do not expect you to know all of this off the top of your head. We expect you to do research and ask questions. However, do not merely copy and paste answers from Google or Stack Overflow. Read the information and understand it first. Then use your own words to explain the answer._____no_output_____- How would you explain A-Star to a family member (layman)?
** ANSWER **:
The A-star algorithm has a brain/extra knowledge which helps it make a smart choice at each step, thereby leading it to the destination without exploring many unwanted paths_____no_output_____- How does A-Star search algorithm differ from Uniform cost search? What about Best First search?
** ANSWER **:
A-star uses f, the sum of the path cost so far (g) and the estimated cost to the goal (h); this extra knowledge/information about the goal guides the search (see the formula summary at the end of this Q&A).
Uniform cost search keeps exploring nodes uniformly in every direction, ordered only by the path cost so far, which slows down the search.
Best First Search uses the extra knowledge (the heuristic) but ignores the cost already travelled: it keeps expanding the node with the lowest estimated cost to the goal until it reaches the destination/goal._____no_output_____- What is a heuristic?
** ANSWER **:
A heuristic is an estimated movement cost from a given node to the goal; it's usually a smart guess which is always less than the actual cost from the given node to the goal node._____no_output_____- What is a consistent heuristic?
** ANSWER **:
A heuristic is consistent if the estimated cost from the current node to the goal is less than or equal to the cost from the current node to a successor node, plus the estimated cost from the successor node to the goal_____no_output_____- What is an admissible heuristic?
** ANSWER **:
A heuristic is admissible if the estimated cost is never more than the actual cost from the current node to the goal node.
i.e. a heuristic function is admissible if it never overestimates the distance to the goal._____no_output_____- ___ admissible heuristics are consistent.
*CHOOSE ONE*
- All
- Some
- None
** ANSWER **:
Some_____no_output_____- ___ consistent heuristics are admissible.
*CHOOSE ONE*
- All
- Some
- None
** ANSWER **:
All_____no_output_____
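For reference, a compact summary of the relations used in the answers above (standard textbook definitions, not part of the original notebook), where $g(n)$ is the cost from the start to $n$, $h(n)$ the heuristic estimate, $h^{*}(n)$ the true remaining cost to the goal, and $c(n, n')$ the step cost to a successor $n'$:

$$f(n) = g(n) + h(n), \qquad h(n) \le h^{*}(n)\ \text{(admissible)}, \qquad h(n) \le c(n, n') + h(n')\ \text{(consistent)}$$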
| {
"repository": "classofai/Route-Planner",
"path": "project_notebook.ipynb",
"matched_keywords": [
"STAR"
],
"stars": null,
"size": 146168,
"hexsha": "d0f28d700af9215093f2fc85ab6d93b59630f12d",
"max_line_length": 15208,
"avg_line_length": 50.0575342466,
"alphanum_fraction": 0.6344617153
} |
# Notebook from AntoniaLbg/leach-et-al-2022
Path: notebooks/parameter-tuning/default-experiment-metrics.ipynb
## Import dependencies_____no_output_____
<code>
import numpy as np
import sys
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import seaborn as sn
import scipy as sp
from tqdm import tqdm
import glob
from fair import *
from fair.scripts.data_retrieval import *
%matplotlib inline_____no_output_____
</code>
Definition used to round output table entries to a given number of significant figures._____no_output_____
<code>
def round_to_sf(x,sf):
if x==0:
return 0
if np.isnan(x):
return '-'
else:
num= round(x, sf - int(np.floor(np.log10(abs(x)))))
if abs(num)>10**sf:
return str(int(num))
else:
return str(num)_____no_output_____
</code>
# I. Default parameter simulated concentrations
Here we run historical emissions to test how the default parameter set simulates the historical evolution of concentrations._____no_output_____
<code>
## first we view & create a latex table for the default parameter set:
default_params = get_gas_parameter_defaults()
params_table = default_params.default.T.sort_index().rename(dict(a1='$a_1$',a2='$a_2$',a3='$a_3$',a4='$a_4$',
tau1='$\tau_1$',tau2='$\tau_2$',tau3='$\tau_3$',tau4='$\tau_4$',
r0='$r_0$',rC='$r_u$',rT='$r_T$',rA='$r_a$',PI_conc='PI\_conc',
f1='$f_1$',f2='$f_2$',f3='$f_3$'),axis=1)
params_table.index.name='agent'
params_table.columns.name='parameter'
params_table.index = [x.replace('_','\_') for x in params_table.index]
params_table.applymap(lambda x:round_to_sf(x,2)).replace(np.nan,'')#.to_latex('../../docs/manuscript/tables/TabS2',escape=False,bold_rows=True)_____no_output_____
</code>
### data retrieval_____no_output_____#### concentrations
WMGHG concentrations are from the CMIP6 concentration dataset, [Meinshausen et al., 2017](https://www.geosci-model-dev.net/10/2057/2017/). For some species, these are extended using data from NOAA.
Reference:
Meinshausen, M., Vogel, E., Nauels, A., Lorbacher, K., Meinshausen, N., Etheridge, D. M., … Weiss, R. (2017). Historical greenhouse gas concentrations for climate modelling (CMIP6). Geoscientific Model Development, 10(5), 2057–2116. https://doi.org/10.5194/gmd-10-2057-2017_____no_output_____
<code>
import ftplib
## import concentrations from official CMIP6 timeseries:
CMIP6_conc_ftp = ftplib.FTP('data.iac.ethz.ch','anonymous')
CMIP6_conc_ftp.cwd('CMIP6/input4MIPs/UoM/GHGConc/CMIP/yr/atmos/UoM-CMIP-1-1-0/GHGConc/gr3-GMNHSH/v20160701')
CMIP6_ftp_list = [x for x in CMIP6_conc_ftp.nlst() if x[-3:]=='csv']
WMGHG_concs = pd.DataFrame(dict(zip(['_'.join(x.split('_')[3:-8]) for x in CMIP6_ftp_list],[pd.read_csv('ftp://data.iac.ethz.ch/CMIP6/input4MIPs/UoM/GHGConc/CMIP/yr/atmos/UoM-CMIP-1-1-0/GHGConc/gr3-GMNHSH/v20160701/'+x,usecols=[0,1],index_col=0).iloc[:,0] for x in CMIP6_ftp_list])))
WMGHG_concs = WMGHG_concs[[x for x in WMGHG_concs.columns if x[-2:]!='eq']] # remove "equivalent" concentrations
WMGHG_concs['halon1202'] = 0
WMGHG_concs.loc[1765:2014,'halon1202'] = pd.read_csv('http://www.pik-potsdam.de/~mmalte/rcps/data/RCP45_MIDYEAR_CONCENTRATIONS.DAT',skiprows=38,delim_whitespace=True,index_col=0)['HALON1202'].loc[1765:2014].values_____no_output_____## we extend CO2, CH4 & N2O out to 2019 using the NOAA ESRL data
NOAA_molefrac = pd.read_csv('https://www.esrl.noaa.gov/gmd/aggi/NOAA_MoleFractions_2020.csv',skiprows=2,index_col=0,skipfooter=5).iloc[1:].replace('nd',np.nan).apply(pd.to_numeric).rename(dict(CO2='carbon_dioxide',CH4='methane',N2O='nitrous_oxide'),axis=1)
WMGHG_concs = WMGHG_concs.reindex(np.arange(2020))
for species in ['carbon_dioxide','methane','nitrous_oxide']:
# scale the NOAA data to join seamlessly (scale factors are almost exactly 1)
scale_factor = WMGHG_concs.loc[2010:2014,species].mean() / NOAA_molefrac.loc[2010:2015,species].mean()
WMGHG_concs.loc[2015:2019,species] = NOAA_molefrac.loc[2015:2020,species].values * scale_factor/nfs/a65/pmcjs/miniconda3/envs/leach2021/lib/python3.7/site-packages/ipykernel_launcher.py:2: ParserWarning: Falling back to the 'python' engine because the 'c' engine does not support skipfooter; you can avoid this warning by specifying engine='python'.
WMGHG_concs.drop(np.arange(1750),inplace=True)_____no_output_____# rescale all GHGs to be in ppb (bar CO2)
WMGHG_concs[WMGHG_concs.columns.drop(['carbon_dioxide','methane','nitrous_oxide'])] *= 1/1000_____no_output_____
</code>
#### emissions & forcing
Emissions & external forcing are taken from the RCMIP protocol.
Reference:
Nicholls, Z. R. J., Meinshausen, M., Lewis, J., Gieseke, R., Dommenget, D., Dorheim, K., … Xie, Z. (2020). Reduced complexity model intercomparison project phase 1: Protocol, results and initial observations. Geoscientific Model Development Discussions, 1–33. https://doi.org/10.5194/gmd-2019-375_____no_output_____
<code>
## emissions
def get_SSP_emms(ssp):
emms = RCMIP_to_FaIR_input_emms(ssp).interpolate().loc[1750:2100]
rebase_species = ['so2','nox','co','nmvoc','bc','nh3','oc','nox_avi','methyl_bromide','methyl_chloride','chcl3','ch2cl2']
emms.loc[:,rebase_species] -= emms.loc[1750,rebase_species]
return emms
choose_ssps=['ssp119','ssp126','ssp245','ssp370','ssp585']
SSP_emms = pd.concat([get_SSP_emms(x) for x in choose_ssps],axis=1,keys=choose_ssps)_____no_output_____## forcing
SSP_forc = pd.concat([get_RCMIP_forc(x) for x in choose_ssps],axis=1,keys=choose_ssps).loc[:2100]_____no_output_____
</code>
## run the model!_____no_output_____
<code>
default_SSP_run = run_FaIR(emissions_in=SSP_emms,forcing_in=SSP_forc)Integrating 5 scenarios, 1 gas cycle parameter sets, 1 thermal response parameter sets, over ['bc', 'bc|aci', 'bc|bc_on_snow', 'c2f6', 'c3f8', 'c4f10', 'c5f12', 'c6f14', 'c7f16', 'c8f18', 'c_c4f8', 'carbon_dioxide', 'carbon_tetrachloride', 'carbon_tetrachloride|o3', 'cf4', 'cfc11', 'cfc113', 'cfc113|o3', 'cfc114', 'cfc114|o3', 'cfc115', 'cfc115|o3', 'cfc11|o3', 'cfc12', 'cfc12|o3', 'ch2cl2', 'ch2cl2|o3', 'ch3ccl3', 'ch3ccl3|o3', 'chcl3', 'chcl3|o3', 'co', 'co|o3', 'halon1202', 'halon1202|o3', 'halon1211', 'halon1211|o3', 'halon1301', 'halon1301|o3', 'halon2402', 'halon2402|o3', 'hcfc141b', 'hcfc141b|o3', 'hcfc142b', 'hcfc142b|o3', 'hcfc22', 'hcfc22|o3', 'hfc125', 'hfc134a', 'hfc143a', 'hfc152a', 'hfc227ea', 'hfc23', 'hfc236fa', 'hfc245fa', 'hfc32', 'hfc365mfc', 'hfc4310mee', 'methane', 'methane|strat_h2o', 'methane|o3', 'methyl_bromide', 'methyl_bromide|o3', 'methyl_chloride', 'methyl_chloride|o3', 'nf3', 'nh3', 'nitrous_oxide', 'nitrous_oxide|o3', 'nmvoc', 'nmvoc|o3', 'nox', 'nox_avi', 'nox_avi|contrails', 'nox|o3', 'oc', 'oc|aci', 'sf6', 'so2', 'so2f2', 'so2|aci'] forcing agents, between 1750 and 2100...
</code>
## plot the results_____no_output_____
<code>
## get MAGICC7.1.0 data to benchmark
MAGICC_defaults = pd.read_csv('../../aux/input-data/RCMIP/data_results_phase-1_magicc7_rcmip_phase-1_magicc7.1.0.beta_v1-0-0.csv').drop(['Model','Unit','Climatemodel','Region'],axis=1).set_index(['Scenario','Variable']).reindex(['esm-'+x+'-allGHG' for x in choose_ssps],level=0)
RCMIP_outputmap = pd.read_csv('../../aux/FaIRv2.0.0-alpha_RCMIP_inputmap.csv',index_col=0)
MAGICC_defaults = MAGICC_defaults.rename(RCMIP_outputmap.reset_index().set_index('RCMIP_concs_key')['index'].to_dict(),level=1).reindex(RCMIP_outputmap.index,level=1).T
MAGICC_defaults.index = MAGICC_defaults.index.astype(int)
MAGICC_defaults.rename(dict(zip(['esm-'+x+'-allGHG' for x in choose_ssps],choose_ssps)),axis=1,level=0,inplace=True)_____no_output_____## get FaIRv1.5 data to benchmark
FaIR_defaults = pd.concat([pd.read_csv('../../aux/input-data/RCMIP/rcmip-master-data-results-phase-1-fair/data/results/phase-1/fair/rcmip_phase-1_fair-1.5-default-'+x+'_v1-0-1.csv') for x in ['esm-'+x+'-allGHG' for x in choose_ssps]]).drop(['Model','Unit','Climatemodel','Region'],axis=1).set_index(['Scenario','Variable'])
FaIR_defaults = FaIR_defaults.rename(RCMIP_outputmap.reset_index().set_index('RCMIP_concs_key')['index'].to_dict(),level=1).reindex(RCMIP_outputmap.index,level=1).T
FaIR_defaults.index = [int(x[:4]) for x in FaIR_defaults.index]
FaIR_defaults.rename(dict(zip(['esm-'+x+'-allGHG' for x in choose_ssps],choose_ssps)),axis=1,level=0,inplace=True)_____no_output_____## set plot rcParams
matplotlib.rcParams['font.family']='Helvetica'
matplotlib.rcParams['font.size']=11
matplotlib.rcParams['axes.formatter.limits']=-3,3
matplotlib.rcParams['legend.frameon']=False
plt.rcParams['pdf.fonttype'] = 42_____no_output_____## & plot!
colors= {'ssp245':'#7570b3','ssp370':'#d95f02','ssp585':'#e7298a','ssp119':'#66a61e','ssp126':'#1b9e77','history':'grey'}
map_conc_names = dict(zip(WMGHG_concs.columns,['C$_2$F$_6$','C$_3$F$_8$','C$_4$F$_{10}$','C$_5$F$_{12}$','C$_6$F$_{14}$','C$_7$F$_{16}$','C$_8$F$_{18}$','cC$_4$F$_{8}$','CO$_2$','CCl$_4$','CF$_4$','CFC113','CFC114','CFC115','CFC11','CFC12','CH$_2$Cl$_2$','CH$_3$CCl$_3$','CHCl$_3$','Halon1211','Halon1301','Halon2402','HCFC141b', 'HCFC142b', 'HCFC22', 'HFC125','HFC134a', 'HFC143a', 'HFC152a', 'HFC227ea', 'HFC236fa', 'HFC23','HFC245fa', 'HFC32', 'HFC365mfc', 'HFC4310mee','CH$_4$','CH$_3$Br','CH$_3$Cl','NF$_3$','N$_2$O','SF$_6$','SO$_2$F$_2$','Halon1202']))
fig,ax = plt.subplots(8,6,figsize=(15,15))
with plt.rc_context({"lines.linewidth": 0.75,"lines.markersize":4,"lines.markerfacecolor":'none',"lines.markeredgewidth":0.5}):
for i,gas in enumerate(WMGHG_concs.columns):
ax.flatten()[i].plot(WMGHG_concs.loc[1850:,gas].iloc[::10],'o',color='k')
for ssp in choose_ssps:
ax.flatten()[i].plot(default_SSP_run['C'].loc[2014:2100,(ssp,'default',gas)],color=colors[ssp],label=ssp)
ax.flatten()[i].plot(MAGICC_defaults.loc[2014:2100,(ssp,gas)]*RCMIP_outputmap.loc[gas,'RCMIP_concs_scaling'],color=colors[ssp],ls=':')
try: # need exceptions for FaIR as not all gases were included as this point.
ax.flatten()[i].plot(FaIR_defaults.loc[2014:2100,(ssp,gas)]*RCMIP_outputmap.loc[gas,'RCMIP_concs_scaling'],color=colors[ssp],ls='-.')
except:
pass
ax.flatten()[i].plot(default_SSP_run['C'].loc[1850:2014,('ssp245','default',gas)],color=colors['history'],label='historical')
ax.flatten()[i].plot(MAGICC_defaults.loc[1850:2014,(ssp,gas)]*RCMIP_outputmap.loc[gas,'RCMIP_concs_scaling'],color=colors['history'],ls=':')
try:
ax.flatten()[i].plot(FaIR_defaults.loc[1850:2014,(ssp,gas)]*RCMIP_outputmap.loc[gas,'RCMIP_concs_scaling'],color=colors['history'],ls='-.')
except:
pass
ax.flatten()[i].text(0.5,0.98,map_conc_names[gas],transform=ax.flatten()[i].transAxes,va='bottom',ha='center',fontsize=12,fontweight='bold')
if gas in ['carbon_dioxide','methane','nitrous_oxide']:
ax1 = inset_axes(ax.flatten()[i],width="100%",height="100%",bbox_to_anchor=(0.05,0.43,0.5,0.6),bbox_transform=ax.flatten()[i].transAxes)
ax1.plot(default_SSP_run['C'].loc[1850:2014,('ssp245','default',gas)],color=colors['history'])
ax1.plot(MAGICC_defaults.loc[1850:2014,(ssp,gas)]*RCMIP_outputmap.loc[gas,'RCMIP_concs_scaling'],color=colors['history'],ls=':')
ax1.plot(FaIR_defaults.loc[1850:2014,(ssp,gas)]*RCMIP_outputmap.loc[gas,'RCMIP_concs_scaling'],color=colors['history'],ls='-.')
ax1.plot(WMGHG_concs.loc[1850:,gas].iloc[::10],'o',color='k')
ax1.set_xticklabels([])
ax1.tick_params(left=False,labelleft=False,right=True,labelright=True)
ax1.ticklabel_format(axis='y',style="plain")
ax1.set_xlim(1850,2014)
[a.tick_params(labelbottom=False) for a in ax.flatten()]
[a.tick_params(labelbottom=True) for a in ax.flatten()[-11:]]
[a.ticklabel_format(style="plain") for a in ax.flatten()[-11:]]
[a.set_xlabel('year') for a in ax.flatten()[-11:]]
[a.set_xlim(1850,2100) for a in ax.flatten()]
[a.spines[pos].set_visible(False) for pos in ['right','top'] for a in ax.flatten()]
ax.flatten()[-6].plot([],[],'k',label='FaIRv2.0.0')
ax.flatten()[-6].plot([],[],'k:',label='MAGICC7.1.0-beta')
ax.flatten()[-6].plot([],[],'k-.',label='FaIRv1.5')
# fig.subplots_adjust(hspace=0.1)
plt.tight_layout(h_pad=0,w_pad=0)
ax.flatten()[-6].legend(loc=(1.05,0),labelspacing=0.1,prop={'size':9})
[a.set_visible(False) for a in ax.flatten()[-5:]]
[fig.savefig('../../docs/manuscript/figures/Fig2.'+x,dpi=600,bbox_inches='tight') for x in ['png','pdf']]
''/nfs/a65/pmcjs/miniconda3/envs/leach2021/lib/python3.7/site-packages/ipykernel_launcher.py:58: UserWarning: This figure includes Axes that are not compatible with tight_layout, so results might be incorrect.
findfont: Font family ['Helvetica'] not found. Falling back to DejaVu Sans.
findfont: Font family ['Helvetica'] not found. Falling back to DejaVu Sans.
findfont: Font family ['Helvetica'] not found. Falling back to DejaVu Sans.
</code>
# II. Default parameter metrics
Here we compute GWP values for each gas in the FaIRv2.0.0-alpha namelist, under a scenario of concentrations fixed at their present-day (2014) levels. These include the impact due to all forcing (direct through radiative effects + indirect through any atmospheric chemistry)._____no_output_____
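For reference, the metric computed in the cells below is the standard GWP: the absolute global warming potential of gas $x$ (the time-integrated radiative forcing anomaly following a 1 t pulse emission) divided by that of CO$_2$ over the same time horizon $H$; this is what the `AGWP` cumulative sums and the `GWP` ratio implement.

$$\mathrm{GWP}_x(H) = \frac{\mathrm{AGWP}_x(H)}{\mathrm{AGWP}_{\mathrm{CO_2}}(H)} = \frac{\int_0^H \mathrm{RF}_x(t)\,\mathrm{d}t}{\int_0^H \mathrm{RF}_{\mathrm{CO_2}}(t)\,\mathrm{d}t}$$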
<code>
historical_concrun = WMGHG_concs.dropna().copy()
## add in aerosol emissions
aer_species = ['so2','nox','co','nmvoc','bc','nh3','oc','nox_avi']
historical_concrun = pd.concat([historical_concrun,get_SSP_emms('ssp245').loc[:2014,aer_species]],axis=1)
historical_concrun = pd.concat([historical_concrun],axis=1,keys=['historical'])
historical_forc = pd.concat([get_RCMIP_forc('ssp245').loc[historical_concrun.index]],axis=1,keys=['historical'])
## extend both series into the future, but fixed @ 2014 levels
historical_concrun = historical_concrun.reindex(np.arange(1750,2516)).interpolate(limit=501,limit_direction='forward')
historical_forc = historical_forc.reindex(np.arange(1750,2516)).interpolate(limit=501,limit_direction='forward')_____no_output_____## concentration-driven run over history
hist_run = run_FaIR(concentrations_in=historical_concrun, forcing_in=historical_forc, aer_concs_in=aer_species)Integrating 1 scenarios, 1 gas cycle parameter sets, 1 thermal response parameter sets, over ['bc', 'bc|aci', 'bc|bc_on_snow', 'c2f6', 'c3f8', 'c4f10', 'c5f12', 'c6f14', 'c7f16', 'c8f18', 'c_c4f8', 'carbon_dioxide', 'carbon_tetrachloride', 'carbon_tetrachloride|o3', 'cf4', 'cfc11', 'cfc113', 'cfc113|o3', 'cfc114', 'cfc114|o3', 'cfc115', 'cfc115|o3', 'cfc11|o3', 'cfc12', 'cfc12|o3', 'ch2cl2', 'ch2cl2|o3', 'ch3ccl3', 'ch3ccl3|o3', 'chcl3', 'chcl3|o3', 'co', 'co|o3', 'halon1202', 'halon1202|o3', 'halon1211', 'halon1211|o3', 'halon1301', 'halon1301|o3', 'halon2402', 'halon2402|o3', 'hcfc141b', 'hcfc141b|o3', 'hcfc142b', 'hcfc142b|o3', 'hcfc22', 'hcfc22|o3', 'hfc125', 'hfc134a', 'hfc143a', 'hfc152a', 'hfc227ea', 'hfc23', 'hfc236fa', 'hfc245fa', 'hfc32', 'hfc365mfc', 'hfc4310mee', 'methane', 'methane|strat_h2o', 'methane|o3', 'methyl_bromide', 'methyl_bromide|o3', 'methyl_chloride', 'methyl_chloride|o3', 'nf3', 'nh3', 'nitrous_oxide', 'nitrous_oxide|o3', 'nmvoc', 'nmvoc|o3', 'nox', 'nox_avi', 'nox_avi|contrails', 'nox|o3', 'oc', 'oc|aci', 'sf6', 'so2', 'so2f2', 'so2|aci'] forcing agents, between 1750 and 2515...
## obtain corresponding emissions & reset aerosol emissions
hist_emms = hist_run['Emissions'].droplevel(axis=1,level=1)
hist_emms.loc[:2014,('historical',aer_species)] = get_SSP_emms('ssp245').loc[:2014,aer_species].values
hist_emms.loc[2015:,('historical',aer_species)] = hist_emms.loc[2014,('historical',aer_species)].values_____no_output_____## run emissions to check consistency
hist_run_emms = run_FaIR(emissions_in=hist_emms, forcing_in=historical_forc)Integrating 1 scenarios, 1 gas cycle parameter sets, 1 thermal response parameter sets, over ['bc', 'bc|aci', 'bc|bc_on_snow', 'c2f6', 'c3f8', 'c4f10', 'c5f12', 'c6f14', 'c7f16', 'c8f18', 'c_c4f8', 'carbon_dioxide', 'carbon_tetrachloride', 'carbon_tetrachloride|o3', 'cf4', 'cfc11', 'cfc113', 'cfc113|o3', 'cfc114', 'cfc114|o3', 'cfc115', 'cfc115|o3', 'cfc11|o3', 'cfc12', 'cfc12|o3', 'ch2cl2', 'ch2cl2|o3', 'ch3ccl3', 'ch3ccl3|o3', 'chcl3', 'chcl3|o3', 'co', 'co|o3', 'halon1202', 'halon1202|o3', 'halon1211', 'halon1211|o3', 'halon1301', 'halon1301|o3', 'halon2402', 'halon2402|o3', 'hcfc141b', 'hcfc141b|o3', 'hcfc142b', 'hcfc142b|o3', 'hcfc22', 'hcfc22|o3', 'hfc125', 'hfc134a', 'hfc143a', 'hfc152a', 'hfc227ea', 'hfc23', 'hfc236fa', 'hfc245fa', 'hfc32', 'hfc365mfc', 'hfc4310mee', 'methane', 'methane|strat_h2o', 'methane|o3', 'methyl_bromide', 'methyl_bromide|o3', 'methyl_chloride', 'methyl_chloride|o3', 'nf3', 'nh3', 'nitrous_oxide', 'nitrous_oxide|o3', 'nmvoc', 'nmvoc|o3', 'nox', 'nox_avi', 'nox_avi|contrails', 'nox|o3', 'oc', 'oc|aci', 'sf6', 'so2', 'so2f2', 'so2|aci'] forcing agents, between 1750 and 2515...
## run over each gas species, perturbing each by 1t in 2015
gas_mass_conversion_factors = pd.Series(index=hist_emms.columns.levels[1],dtype=float)
gas_mass_conversion_factors.loc[:] = 1
gas_mass_conversion_factors.loc['carbon_dioxide'] = (1/1000)/(44.01/12.01)
gas_mass_conversion_factors.loc['nitrous_oxide'] = 28/44
rf_results = []
for gas_species in hist_emms.columns.levels[1]:
pert_emms = hist_emms.copy()
pert_emms.loc[2015,('historical',gas_species)] += gas_mass_conversion_factors.loc[gas_species]/1e6
pert_result = run_FaIR(emissions_in=pert_emms, forcing_in=historical_forc, show_run_info=False)
rf_results += [(pert_result['RF'].loc[:,('historical','default','Total')]-hist_run_emms['RF'].loc[:,('historical','default','Total')]).rename(gas_species)]
rf_results = pd.concat(rf_results,axis=1)100%|██████████| 765/765 [00:00<00:00, 2395.30 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2411.74 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2406.14 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2410.23 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2408.74 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2395.27 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2402.66 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2401.23 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2397.19 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2403.85 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2416.76 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2413.48 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2413.65 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2410.79 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2399.82 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2402.50 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2410.87 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2410.36 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2406.90 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2393.91 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2401.53 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2388.75 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2395.24 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2407.69 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2405.70 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2394.59 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2396.24 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2413.20 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2402.47 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2386.94 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2404.02 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2404.06 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2398.38 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2397.07 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2402.42 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2397.61 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2410.40 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2409.80 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2398.35 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2409.12 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2403.94 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2394.80 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2407.36 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2396.37 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2397.04 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2408.10 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2397.88 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2400.70 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2416.45 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2403.86 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2398.85 timestep/s]
100%|██████████| 765/765 [00:00<00:00, 2407.93 timestep/s]
AGWP = rf_results.cumsum().loc[2015+np.array([5,10,20,50,100,500])]
AGWP.index = np.array([5,10,20,50,100,500])
GWP = AGWP.apply(lambda x: x/AGWP.carbon_dioxide)_____no_output_____print('GWP value over various timescales:')
GWP.index.name = 'timescale / years'
GWP.columns.name = 'agent'
GWP.T.applymap(lambda x:round_to_sf(x,2))#.to_latex('../../docs/manuscript/tables/TabS3',escape=True,bold_rows=True)GWP value over various timescales:
</code>
# Supplement I. Methane lifetime over history + RCP8.5 extension
A demonstration of the state-dependent lifetime of methane over RCP history + extended to 2100 with RCP8.5. We use RCP8.5 since this is (at least, appears to be) the most commonly discussed scenario in methane sensitivity literature._____no_output_____
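In the cells below, the realised methane lifetime is recovered from the model output as the state-dependent scaling factor $\alpha_{\mathrm{CH_4}}(t)$ multiplied by the baseline lifetime parameter $\tau_1$ (this is what the `CH4_lifetime` line computes):

$$\tau_{\mathrm{CH_4}}(t) = \alpha_{\mathrm{CH_4}}(t)\,\tau_1$$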
<code>
RCP85_emms = RCMIP_to_FaIR_input_emms('rcp85').dropna(how='all').dropna(axis=1,how='all')
RCP85_emms = pd.concat([RCP85_emms],axis=1,keys=['RCP8.5'])
rebase_species = ['so2','nox','co','nmvoc','bc','nh3','oc','nox_avi','methyl_bromide','methyl_chloride','chcl3','ch2cl2']
rebase_species = list(set(rebase_species).intersection(RCP85_emms.columns.levels[1]))
RCP85_emms.loc[:,('RCP8.5',rebase_species)] -= RCP85_emms.loc[1765,('RCP8.5',rebase_species)]
RCP85_forc = pd.concat([get_RCMIP_forc('rcp85',['Radiative Forcing|Anthropogenic|Albedo Change','Radiative Forcing|Natural']).dropna()],axis=1,keys=['RCP8.5'])_____no_output_____RCP85_run = run_FaIR(emissions_in=RCP85_emms,
forcing_in=RCP85_forc,
gas_parameters=get_gas_parameter_defaults().reindex(RCP85_emms.columns.levels[1],axis=1,level=1))Integrating 1 scenarios, 1 gas cycle parameter sets, 1 thermal response parameter sets, over ['bc', 'c2f6', 'c6f14', 'carbon_dioxide', 'carbon_tetrachloride', 'cf4', 'cfc11', 'cfc113', 'cfc114', 'cfc115', 'cfc12', 'ch3ccl3', 'co', 'halon1202', 'halon1211', 'halon1301', 'halon2402', 'hcfc141b', 'hcfc142b', 'hcfc22', 'hfc125', 'hfc134a', 'hfc143a', 'hfc227ea', 'hfc23', 'hfc245fa', 'hfc32', 'hfc4310mee', 'methane', 'methyl_bromide', 'methyl_chloride', 'nh3', 'nitrous_oxide', 'nmvoc', 'nox', 'oc', 'sf6', 'so2'] forcing agents, between 1765 and 2500...
CH4_lifetime = RCP85_run['alpha'].xs('methane',axis=1,level=2).droplevel(axis=1,level=1)*RCP85_run['gas_parameters'].loc['tau1',('default','methane')]_____no_output_____sn.lineplot(data=CH4_lifetime.loc[1850:2100],palette=['k'])
sn.despine()
plt.xlabel('year')
plt.ylabel('CH$_4$ lifetime / yrs')
plt.gca().ticklabel_format(style='plain')
plt.xlim(1850,2100)
[plt.savefig('../../docs/manuscript/figures/FigS2.'+x,dpi=600,bbox_inches='tight') for x in ['png','pdf']]
''_____no_output_____# comparison with Holmes et al., 2013
## 2010 values:
print('Holmes 2010:',1/(1/120+1/150+1/200+1/11.2))
print('FaIRv2.0.0-alpha 2010:',CH4_lifetime.loc[2010].values[0],end='\n\n')
print('Holmes 2010-2100 change:',(1/120+1/150+1/200+1/11.2)/(1/120+1/150+1/200+1/(11.2*1.129)))
print('FaIRv2.0.0-alpha 2010-2100 change:',(CH4_lifetime.loc[2100]/CH4_lifetime.loc[2010]).values[0])Holmes 2010: 9.15032679738562
FaIRv2.0.0-alpha 2010: 8.902796846929608
Holmes 2010-2100 change: 1.102961458892039
FaIRv2.0.0-alpha 2010-2100 change: 1.0991068995747364
</code>
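The Holmes et al. (2013) reference value in the cell above combines the individual methane loss timescales as parallel sinks, i.e. the total lifetime is the reciprocal of the summed loss rates:

$$\tau_{\mathrm{total}} = \left(\frac{1}{120} + \frac{1}{150} + \frac{1}{200} + \frac{1}{11.2}\right)^{-1} \approx 9.15\ \mathrm{yr}$$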
# Supplement II. FaIRv2.0.0 additivity
A very brief test of how linear FaIR actually is. Non-linearity in FaIR arises only from the CO2 & CH4 cycles; the climate response of FaIR is linear in forcing. Here we test the linearity over history by carrying out several CO2 / CH4 pulse response experiments._____no_output_____
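As a minimal sketch of the check performed below (the helper `run_pulse` is hypothetical and stands in for the `run_FaIR` pulse experiments in this notebook): if the model were perfectly additive, pulse anomalies rescaled to a common pulse size would coincide for every pulse magnitude.

```python
import numpy as np

def scaled_anomaly(run_pulse, pulse_size_gtco2):
    """Anomaly from a single pulse, rescaled to a 1000 GtCO2-eq equivalent pulse.

    `run_pulse` is a hypothetical callable returning a model output timeseries
    (numpy array) for a given pulse size added on top of the baseline scenario.
    """
    baseline = run_pulse(0.0)                # reference run, no pulse
    perturbed = run_pulse(pulse_size_gtco2)  # same scenario plus one pulse
    return (perturbed - baseline) * 1000.0 / pulse_size_gtco2

# Perfect linearity would mean scaled_anomaly(f, 1.0) and scaled_anomaly(f, 1000.0)
# are identical; the plots below show how far FaIR departs from this.
```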
<code>
# default_SSP_run = run_FaIR(emissions_in=SSP_emms,forcing_in=SSP_forc)
base_emms = RCMIP_to_FaIR_input_emms('ssp245').interpolate().loc[1750:2500]
rebase_species = ['so2','nox','co','nmvoc','bc','nh3','oc','nox_avi','methyl_bromide','methyl_chloride','chcl3','ch2cl2']
base_emms.loc[:,rebase_species] -= base_emms.loc[1750,rebase_species]
base_emms = pd.concat([base_emms],axis=1,keys=['ssp245'])
experiments = []
# scale methane by 28 (GWP100) for closer comparison
pulse_scaling = dict(carbon_dioxide=12/44,methane=1000/28)
for species in ['carbon_dioxide','methane']:
for pulse_size in [0]+list(np.arange(0.01,0.1,0.01))+list(np.arange(0.1,1,0.1))+list(np.arange(1,10,1))+list(np.arange(10,100,10))+list(np.arange(100,1001,100)):
experiment = base_emms.copy()
experiment.loc[2019,('ssp245',species)] += pulse_size*pulse_scaling[species]
experiments += [experiment.rename(dict(ssp245=species+'_'+str(pulse_size)),axis=1,level=0)]
experiments = pd.concat(experiments,axis=1)_____no_output_____pulse_runs = run_FaIR(emissions_in=experiments,
forcing_in=pd.concat([get_RCMIP_forc('ssp245')]*experiments.columns.levels[0].size,axis=1,keys=experiments.columns.levels[0]))Integrating 94 scenarios, 1 gas cycle parameter sets, 1 thermal response parameter sets, over ['bc', 'bc|aci', 'bc|bc_on_snow', 'c2f6', 'c3f8', 'c4f10', 'c5f12', 'c6f14', 'c7f16', 'c8f18', 'c_c4f8', 'carbon_dioxide', 'carbon_tetrachloride', 'carbon_tetrachloride|o3', 'cf4', 'cfc11', 'cfc113', 'cfc113|o3', 'cfc114', 'cfc114|o3', 'cfc115', 'cfc115|o3', 'cfc11|o3', 'cfc12', 'cfc12|o3', 'ch2cl2', 'ch2cl2|o3', 'ch3ccl3', 'ch3ccl3|o3', 'chcl3', 'chcl3|o3', 'co', 'co|o3', 'halon1202', 'halon1202|o3', 'halon1211', 'halon1211|o3', 'halon1301', 'halon1301|o3', 'halon2402', 'halon2402|o3', 'hcfc141b', 'hcfc141b|o3', 'hcfc142b', 'hcfc142b|o3', 'hcfc22', 'hcfc22|o3', 'hfc125', 'hfc134a', 'hfc143a', 'hfc152a', 'hfc227ea', 'hfc23', 'hfc236fa', 'hfc245fa', 'hfc32', 'hfc365mfc', 'hfc4310mee', 'methane', 'methane|strat_h2o', 'methane|o3', 'methyl_bromide', 'methyl_bromide|o3', 'methyl_chloride', 'methyl_chloride|o3', 'nf3', 'nh3', 'nitrous_oxide', 'nitrous_oxide|o3', 'nmvoc', 'nmvoc|o3', 'nox', 'nox_avi', 'nox_avi|contrails', 'nox|o3', 'oc', 'oc|aci', 'sf6', 'so2', 'so2f2', 'so2|aci'] forcing agents, between 1750 and 2500...
</code>
### nonlinearities in terms of scaled anomalies_____no_output_____
<code>
## compute the pulse experiment anomalies relative to the baseline
pulse_temp_anomalies = (pulse_runs['T'] - pulse_runs['T'].carbon_dioxide_0.values)
pulse_temp_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in pulse_temp_anomalies.columns.levels[0]])
pulse_temp_anomalies = pulse_temp_anomalies.drop(0,axis=1,level=1)
pulse_temp_anomalies_scaled = pulse_temp_anomalies.apply(lambda x: x*1000/x.name[1])
CO2_RF_anomalies = (pulse_runs['RF'].xs('carbon_dioxide',axis=1,level=2) - pulse_runs['RF'].xs('carbon_dioxide',axis=1,level=2).carbon_dioxide_0.values)
CO2_RF_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in CO2_RF_anomalies.columns.levels[0]])
CO2_RF_anomalies = CO2_RF_anomalies.drop(0,axis=1,level=1)
CO2_RF_anomalies_scaled = CO2_RF_anomalies.apply(lambda x: x*1000/x.name[1])
CH4_RF_anomalies = (pulse_runs['RF'].xs('methane',axis=1,level=2) - pulse_runs['RF'].xs('methane',axis=1,level=2).carbon_dioxide_0.values)
CH4_RF_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in CH4_RF_anomalies.columns.levels[0]])
CH4_RF_anomalies = CH4_RF_anomalies.drop(0,axis=1,level=1)
CH4_RF_anomalies_scaled = CH4_RF_anomalies.apply(lambda x: x*1000/x.name[1])
CO2_C_anomalies = (pulse_runs['C'].xs('carbon_dioxide',axis=1,level=2) - pulse_runs['C'].xs('carbon_dioxide',axis=1,level=2).carbon_dioxide_0.values)
CO2_C_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in CO2_C_anomalies.columns.levels[0]])
CO2_C_anomalies = CO2_C_anomalies.drop(0,axis=1,level=1)
CO2_C_anomalies_scaled = CO2_C_anomalies.apply(lambda x: x*1000/x.name[1])
CH4_C_anomalies = (pulse_runs['C'].xs('methane',axis=1,level=2) - pulse_runs['C'].xs('methane',axis=1,level=2).carbon_dioxide_0.values)
CH4_C_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in CH4_C_anomalies.columns.levels[0]])
CH4_C_anomalies = CH4_C_anomalies.drop(0,axis=1,level=1)
CH4_C_anomalies_scaled = CH4_C_anomalies.apply(lambda x: x*1000/x.name[1])
CO2_alph_anomalies = pulse_runs['alpha'].xs('carbon_dioxide',axis=1,level=2).sub(pulse_runs['alpha'].xs('carbon_dioxide',axis=1,level=2).carbon_dioxide_0,axis=0)
CO2_alph_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in CO2_alph_anomalies.columns.levels[0]])
CO2_alph_anomalies = CO2_alph_anomalies.drop(0,axis=1,level=1)
CO2_alph_anomalies_scaled = CO2_alph_anomalies.apply(lambda x: x*1000/x.name[1])
CH4_alph_anomalies = pulse_runs['alpha'].xs('methane',axis=1,level=2).sub(pulse_runs['alpha'].xs('methane',axis=1,level=2).carbon_dioxide_0,axis=0)
CH4_alph_anomalies.columns = pd.MultiIndex.from_tuples([(x[:14],float(x[15:])) if x[0]=='c' else (x[:7],float(x[8:])) for x in CH4_alph_anomalies.columns.levels[0]])
CH4_alph_anomalies = CH4_alph_anomalies.drop(0,axis=1,level=1)
CH4_alph_anomalies_scaled = CH4_alph_anomalies.apply(lambda x: x*1000/x.name[1])
anomalies = pd.concat([pulse_temp_anomalies_scaled,
CO2_RF_anomalies_scaled,
CH4_RF_anomalies_scaled,
CO2_C_anomalies_scaled,
CH4_C_anomalies_scaled,
CO2_alph_anomalies_scaled,
CH4_alph_anomalies_scaled],
axis=1,
keys=['T',r'RF$_{\mathrm{CO}_2}$',r'RF$_{\mathrm{CH}_4}$',r'C$_{\mathrm{CO}_2}$',r'C$_{\mathrm{CH}_4}$',r'$\alpha_{\mathrm{CO}_2}$',r'$\alpha_{\mathrm{CH}_4}$'],
names=['variable']).rename(dict(carbon_dioxide='CO$_2$',methane='CH$_4$'),axis=1,level=1).loc[2020:].sort_index(axis=1).stack(level=[0,1,2]).reset_index().rename({'level_0':'time','level_2':'pulse_type','level_3':'pulse_size',0:'value'},axis=1)
anomalies.time -= 2019
# set relative to small pulse limit
## comment out if absolute anomalies (ie. relative to reference) desired
pulse_temp_anomalies_scaled = pulse_temp_anomalies_scaled.apply(lambda x: x-pulse_temp_anomalies_scaled.loc[:,(x.name[0],0.01)])
CO2_RF_anomalies_scaled = CO2_RF_anomalies_scaled.apply(lambda x: x-CO2_RF_anomalies_scaled.loc[:,(x.name[0],0.01)])
CH4_RF_anomalies_scaled = CH4_RF_anomalies_scaled.apply(lambda x: x-CH4_RF_anomalies_scaled.loc[:,(x.name[0],0.01)])
CO2_C_anomalies_scaled = CO2_C_anomalies_scaled.apply(lambda x: x-CO2_C_anomalies_scaled.loc[:,(x.name[0],0.01)])
CH4_C_anomalies_scaled = CH4_C_anomalies_scaled.apply(lambda x: x-CH4_C_anomalies_scaled.loc[:,(x.name[0],0.01)])
CO2_alph_anomalies_scaled = CO2_alph_anomalies_scaled.apply(lambda x: x-CO2_alph_anomalies_scaled.loc[:,(x.name[0],0.01)])
CH4_alph_anomalies_scaled = CH4_alph_anomalies_scaled.apply(lambda x: x-CH4_alph_anomalies_scaled.loc[:,(x.name[0],0.01)])
anomalies_rel = pd.concat([pulse_temp_anomalies_scaled,
CO2_RF_anomalies_scaled,
CH4_RF_anomalies_scaled,
CO2_C_anomalies_scaled,
CH4_C_anomalies_scaled,
CO2_alph_anomalies_scaled,
CH4_alph_anomalies_scaled],
axis=1,
keys=['T',r'RF$_{\mathrm{CO}_2}$',r'RF$_{\mathrm{CH}_4}$',r'C$_{\mathrm{CO}_2}$',r'C$_{\mathrm{CH}_4}$',r'$\alpha_{\mathrm{CO}_2}$',r'$\alpha_{\mathrm{CH}_4}$'],
names=['variable']).rename(dict(carbon_dioxide='CO$_2$ - relative',methane='CH$_4$ - relative'),axis=1,level=1).loc[2020:].sort_index(axis=1).stack(level=[0,1,2]).reset_index().rename({'level_0':'time','level_2':'pulse_type','level_3':'pulse_size',0:'value'},axis=1)
anomalies_rel.time -= 2019_____no_output_____plot_df = pd.concat([anomalies,anomalies_rel])
plot_df.head()_____no_output_____g=sn.FacetGrid(plot_df.query('pulse_size in [1,10,100,200,500,1000]').sort_values(['pulse_type','variable']),col='variable',row='pulse_type',hue='pulse_size',palette=[(x,x,x) for x in np.arange(0,1,1/7)[::-1]],margin_titles=True,sharey=False)
g.map(sn.lineplot,'time','value')
g.set_titles(col_template="{col_name}",row_template='pulse type = {row_name}',fontweight='bold').set(xlim=[0,480])
[a.set_ylabel('anomaly / ppb') for a in g.axes[:,2]]
[a.set_ylabel('anomaly / ppm') for a in g.axes[:,3]]
[a.set_ylabel('anomaly / W m$^{-2}$') for a in g.axes[:,4]]
[a.set_ylabel('anomaly / K') for a in g.axes[:,-1]]
[a.set_ylabel('anomaly / -') for a in g.axes[:,0]]
g.axes[0,0].legend(title='pulse size / GtCO$_2$-eq')
[plt.savefig('../../docs/manuscript/figures/FigS3.'+x,dpi=600,bbox_inches='tight') for x in ['png','pdf']]
''findfont: Font family ['Helvetica'] not found. Falling back to DejaVu Sans.
</code>
### measuring nonlinearities in a relative sense:
Marked out to prevent from running._____no_output_____## measuring extent of nonlinearity as anomalies relative to 1000 GtC-eq pulse, normalised by 1000 GtC-eq pulse anomaly
CO2_T_nonlin = pulse_temp_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(pulse_temp_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(pulse_temp_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CH4_T_nonlin = pulse_temp_anomalies_scaled.loc[2020:,'methane'].sub(pulse_temp_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(pulse_temp_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
CO2_CO2_RF_nonlin = CO2_RF_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(CO2_RF_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(CO2_RF_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CO2_CH4_RF_nonlin = CO2_RF_anomalies_scaled.loc[2020:,'methane'].sub(CO2_RF_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(CO2_RF_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
CH4_CO2_RF_nonlin = CH4_RF_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(CH4_RF_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(CH4_RF_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CH4_CH4_RF_nonlin = CH4_RF_anomalies_scaled.loc[2020:,'methane'].sub(CH4_RF_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(CH4_RF_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
CO2_CO2_C_nonlin = CO2_C_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(CO2_C_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(CO2_C_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CO2_CH4_C_nonlin = CO2_C_anomalies_scaled.loc[2020:,'methane'].sub(CO2_C_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(CO2_C_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
CH4_CO2_C_nonlin = CH4_C_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(CH4_C_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(CH4_C_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CH4_CH4_C_nonlin = CH4_C_anomalies_scaled.loc[2020:,'methane'].sub(CH4_C_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(CH4_C_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
CO2_CO2_alph_nonlin = CO2_alph_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(CO2_alph_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(CO2_alph_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CO2_CH4_alph_nonlin = CO2_alph_anomalies_scaled.loc[2020:,'methane'].sub(CO2_alph_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(CO2_alph_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
CH4_CO2_alph_nonlin = CH4_alph_anomalies_scaled.loc[2020:,'carbon_dioxide'].sub(CH4_alph_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0).div(CH4_alph_anomalies_scaled.loc[2020:,('carbon_dioxide',1000)],axis=0)
CH4_CH4_alph_nonlin = CH4_alph_anomalies_scaled.loc[2020:,'methane'].sub(CH4_alph_anomalies_scaled.loc[2020:,('methane',1000)],axis=0).div(CH4_alph_anomalies_scaled.loc[2020:,('methane',1000)],axis=0)
nonlinearities = pd.concat([pd.concat([CO2_T_nonlin,CH4_T_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type']),
pd.concat([CO2_CO2_RF_nonlin,CO2_CH4_RF_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type']),
pd.concat([CH4_CO2_RF_nonlin,CH4_CO2_RF_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type']),
pd.concat([CO2_CO2_C_nonlin,CO2_CH4_C_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type']),
pd.concat([CH4_CO2_C_nonlin,CH4_CH4_C_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type']),
pd.concat([CO2_CO2_alph_nonlin,CO2_CH4_alph_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type']),
pd.concat([CH4_CO2_alph_nonlin,CH4_CH4_alph_nonlin],axis=1,keys=['CO2','CH4'],names=['pulse_type'])],
axis=1,
keys=['T','RF$_{\text{CO}_2}$','RF$_{\text{CH}_4}$','C$_{\text{CO}_2}$','C$_{\text{CH}_4}$','$\alpha_{\text{CO}_2}$','$\alpha_{\text{CH}_4}$'],
names=['variable']).sort_index(axis=1).stack(level=[0,1,2]).reset_index().rename({'level_0':'time','level_3':'pulse_size',0:'value'},axis=1)
nonlinearities.time -= 2019_____no_output_____from mpl_toolkits.axes_grid1.inset_locator import inset_axes
class SymPowerNorm(matplotlib.colors.Normalize):
def __init__(self, vmin=None, vmax=None, order=1, clip=False):
self.order = order
matplotlib.colors.Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [abs(self.vmin) / self.vmin * abs(self.vmin)**self.order , abs(self.vmax) / self.vmax * abs(self.vmax)**self.order], [0,1]
return np.ma.masked_array(np.interp(abs(value) / value * abs(value)**self.order, x, y))
def mapplot(x,y,z,**kwargs):
data = pd.concat([x,y,z],axis=1).set_index(['time','pulse_size']).unstack().droplevel(0,axis=1)
norm=matplotlib.colors.Normalize(vmin=-0.5,vmax=0.5)#SymPowerNorm(order=1,vmin=-0.5,vmax=0.5)
plt.pcolormesh(data.index,data.columns,data.values.T,shading='auto',norm=norm,cmap='RdBu_r')
g=sn.FacetGrid(nonlinearities,col='variable',row='pulse_type',margin_titles=True,despine=False,gridspec_kws=dict(hspace=0.1,wspace=0.1))
g.map(mapplot,'time','pulse_size','value')
g.set_titles(col_template="{col_name}",row_template='pulse type = {row_name}',fontweight='bold')
g.set(yscale='log')
[a.set_ylabel('pulse size / GtC-eq') for a in g.axes[:,0]]
[a.set_xlabel('year') for a in g.axes[-1,:]]
axins = inset_axes(g.axes[-1,-1], width="5%",height="100%",loc='lower left',bbox_to_anchor=(1.2, 0.55, 1, 1),bbox_transform=g.axes[-1,-1].transAxes,borderpad=0)
plt.colorbar(cax=axins,extend='both')_____no_output_____
| {
"repository": "AntoniaLbg/leach-et-al-2022",
"path": "notebooks/parameter-tuning/default-experiment-metrics.ipynb",
"matched_keywords": [
"evolution"
],
"stars": 1,
"size": 686642,
"hexsha": "d0f300cab3843427cfb667f4efb5c17336542c5c",
"max_line_length": 291660,
"avg_line_length": 329.9577126382,
"alphanum_fraction": 0.9023129957
} |