Following instructions from the excellent https://www.rinkeby.io/
A full node lets you access all state. There are also instructions for a light node (state-on-demand) and a wallet-only setup (no state).
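Once a node is syncing, it can be sanity-checked over JSON-RPC. A minimal sketch, assuming web3.py and a local geth instance exposing the default HTTP endpoint at 127.0.0.1:8545 (both are assumptions, not part of the notes above):

from web3 import Web3

# Assumes a local node with HTTP JSON-RPC enabled on the default port.
w3 = Web3(Web3.HTTPProvider("http://127.0.0.1:8545"))

print("connected:", w3.isConnected())       # older web3.py spelling; newer releases use is_connected()
print("latest block:", w3.eth.blockNumber)  # newer releases use w3.eth.block_number

A full node answers this from local state; a light node fetches most of it on demand from peers.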
def _isotonic_regression(np.ndarray[DOUBLE, ndim=1] y,
                         np.ndarray[DOUBLE, ndim=1] weight,
                         np.ndarray[DOUBLE, ndim=1] solution):
    cdef:
        Py_ssize_t current, i
        unsigned int len_active_set
        DOUBLE v, w

    len_active_set = y.shape[0]
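The Cython fragment above cuts off right after its setup. As a rough plain-NumPy reference for the pool-adjacent-violators idea it is built around (the function and variable names below are my own, not from the original source):

import numpy as np

def pava(y, weight=None):
    # Weighted pool-adjacent-violators: returns the non-decreasing fit
    # minimizing sum(w * (y - fit)**2).
    y = np.asarray(y, dtype=float)
    w = np.ones_like(y) if weight is None else np.asarray(weight, dtype=float)
    # Each block stores (weighted mean, total weight, block length).
    blocks = []
    for yi, wi in zip(y, w):
        blocks.append([yi, wi, 1])
        # Merge backwards while the monotonicity constraint is violated.
        while len(blocks) > 1 and blocks[-2][0] > blocks[-1][0]:
            m2, w2, n2 = blocks.pop()
            m1, w1, n1 = blocks.pop()
            merged_w = w1 + w2
            merged_m = (m1 * w1 + m2 * w2) / merged_w
            blocks.append([merged_m, merged_w, n1 + n2])
    # Expand the blocks back to a per-point solution.
    return np.concatenate([np.full(n, m) for m, _, n in blocks])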
# Alec Radford, Indico, Kyle Kastner
# License: MIT
"""
Convolutional VAE in a single file.
Bringing in code from IndicoDataSolutions and Alec Radford (NewMu)
Additionally converted to use default conv2d interface instead of explicit cuDNN
"""
import theano
import theano.tensor as T
from theano.compat.python2x import OrderedDict
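As a small companion to the VAE header above, here is a sketch of the reparameterization trick and the Gaussian KL term in Theano; the function names and the choice of MRG_RandomStreams are assumptions of mine, not taken from the original file.

import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams

srng = MRG_RandomStreams(seed=1234)

def sample_latent(mu, log_sigma):
    # Reparameterization trick: z = mu + sigma * eps with eps ~ N(0, 1),
    # so the sampling step stays differentiable w.r.t. mu and log_sigma.
    eps = srng.normal(size=mu.shape)
    return mu + T.exp(0.5 * log_sigma) * eps

def gaussian_kl(mu, log_sigma):
    # KL(N(mu, sigma^2) || N(0, I)), summed over the latent dimensions.
    return -0.5 * T.sum(1.0 + log_sigma - T.sqr(mu) - T.exp(log_sigma), axis=1)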
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.optimizers import SGD
from keras.regularizers import l2, activity_l2
from keras.utils import np_utils
from sklearn import metrics
# to run this code, you'll need to load the following data:
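The snippet above ends with its imports and a truncated note about data. A minimal sketch of how these pieces typically fit together, assuming the Keras 1.x-style layer signatures these imports match; the architecture, input shape (channels-first 32x32 RGB), and class count are placeholders of mine:

model = Sequential()
model.add(Convolution2D(32, 3, 3, border_mode='same',
                        input_shape=(3, 32, 32)))   # placeholder input shape
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(128, W_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(10))                                # placeholder class count
model.add(Activation('softmax'))

sgd = SGD(lr=0.01, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd,
              metrics=['accuracy'])

# Once data is loaded (see the truncated note above), labels would be
# one-hot encoded with np_utils.to_categorical and evaluation could use
# sklearn.metrics on the predicted classes.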
Latency Comparison Numbers
--------------------------
L1 cache reference                           0.5 ns
Branch mispredict                            5   ns
L2 cache reference                           7   ns                      14x L1 cache
Mutex lock/unlock                           25   ns
Main memory reference                      100   ns                      20x L2 cache, 200x L1 cache
Compress 1K bytes with Zippy             3,000   ns        3 us
Send 1K bytes over 1 Gbps network       10,000   ns       10 us
Read 4K randomly from SSD*             150,000   ns      150 us          ~1GB/sec SSD