Slide 24
Slide 24 text
第2章 SimCLRをTensorFlow2で実装してみた 2.3 SimCLRの実装
# One training epoch: update the model on every training batch, log the
# train metrics to TensorBoard, evaluate on the test set and log those too,
# print an epoch summary, then reset the stateful Keras metrics.
# NOTE(review): this fragment references `epoch`, so it presumably sits
# inside an outer `for epoch in range(EPOCHS):` loop — confirm in context.
# Fixes vs. original: typographic quotes ’...’ (a SyntaxError) replaced
# with plain ASCII quotes; indentation (lost in PDF extraction) restored.
for (x_train, y_train) in train_dataset:
    train_step(model, optimizer, x_train, y_train)
with train_summary_writer.as_default():
    tf.summary.scalar('loss', train_loss.result(), step=epoch)
    tf.summary.scalar('accuracy', train_accuracy.result(), step=epoch)

for (x_test, y_test) in test_dataset:
    test_step(model, x_test, y_test)
with test_summary_writer.as_default():
    tf.summary.scalar('loss', test_loss.result(), step=epoch)
    tf.summary.scalar('accuracy', test_accuracy.result(), step=epoch)

template = 'Epoch {}, Loss: {}, Accuracy: {}, Test Loss: {}, Test Accuracy: {}'
print(template.format(epoch + 1,
                      train_loss.result(),
                      train_accuracy.result() * 100,
                      test_loss.result(),
                      test_accuracy.result() * 100))

# Reset metrics every epoch so each epoch's averages start from zero.
train_loss.reset_states()
test_loss.reset_states()
train_accuracy.reset_states()
test_accuracy.reset_states()
その他、コードの共通化がしやすいなどの理由も含め、tf.keras.Model に寄せた実装にして
いきます。
2.3 SimCLRの実装
それでは、SimCLRの実装に入ります。tf.keras.Model のサブクラスを作成し、以下のよ
うに train_step をオーバーライドします。
リスト 2.1: SimCLRModel クラスの実装
class SimCLRModel(tf.keras.Model):
def __init__(self, *args, **kwargs):
    """Forward all arguments to tf.keras.Model and set up SimCLR-specific state."""
    super().__init__(*args, **kwargs)
    # NOTE(review): None marks these as not-yet-configured; presumably
    # they are assigned before training starts — confirm against callers.
    self.global_batch_size = None
    self.emb_dim = None
    # Input-augmentation callable; presumably applied inside train_step
    # to build the two SimCLR views — TODO confirm.
    self.data_augmentation = CustomAugment()
@tf.function
def merge_fn(self, _, per_replica_res):
    """Sum ``per_replica_res`` across all replicas of the active strategy.

    The first positional argument (the merge context) is unused.
    """
    strategy = self.distribute_strategy
    # Collapse the per-replica values into one summed tensor (no axis
    # reduction within each replica's tensor itself).
    return strategy.reduce(tf.distribute.ReduceOp.SUM,
                           per_replica_res,
                           axis=None)
def train_step(self, x):
17