Last active
December 1, 2019 07:04
-
-
Save tokestermw/736dc2b629f9e3a6a53c19d345eccb98 to your computer and use it in GitHub Desktop.
Revisions
-
tokestermw revised this gist
Dec 1, 2019. 1 changed file with 17 additions and 13 deletions. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -2,26 +2,30 @@ def augmentation_fun(x, augment_by=3): # augment the original data point by 3 return [x + random.random() * 2 - 1 for i in range(augment_by)] def train_loop(dataset, do_augment=False): # emit one data point at a time for x in dataset: # ... preprocess, etc. if do_augment: yield from augmentation_fun(x) else: yield x if __name__ == "__main__": dataset = [1, 2, 3] for x in train_loop(dataset, do_augment=True): print(x) # 1.6910669464085353 # 0.6755471610373542 # 1.3990163430931686 # 1.0574266964192516 # 1.7474122098401583 # 2.8820668623545096 # 3.0684208172249834 # 2.0069288129912817 # 3.7746658889260774 -
tokestermw created this gist
Dec 1, 2019. There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters. Learn more about bidirectional Unicode charactersOriginal file line number Diff line number Diff line change @@ -0,0 +1,27 @@ import random def augmentation_fun(x, augment_by=3): return [x + random.random() for i in range(augment_by)] def train_loop(dataset): # emit one data point at a time for x in dataset: # ... preprocess, etc. yield from augmentation_fun(x) if __name__ == "__main__": dataset = [1, 2, 3] for x in train_loop(dataset): print(x) # 1.8743045582080011 # 1.1156374021713173 # 1.9762467469532101 # 2.658889884418666 # 2.350529257341205 # 2.428795321570352 # 3.9366101605189914 # 3.6662388553317062 # 3.285082043706509