Extra-large batch_size

Zchen
2025-10-16 01:57:19 +08:00
parent 69a7285886
commit 25561a7615
2 changed files with 20 additions and 1 deletion


@@ -0,0 +1,19 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "acb1482e",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "language_info": {
+   "name": "python"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}


@@ -74,7 +74,7 @@ dataset:
smooth_kernel_std: 2 # standard deviation of the smoothing kernel applied to the data
neural_dim: 512 # dimensionality of the neural data
-batch_size: 32 # batch size for training (reduced for TPU memory constraints)
+batch_size: 1024 # batch size for training (increased from 32)
n_classes: 41 # number of classes (phonemes) in the dataset
max_seq_elements: 500 # maximum number of sequence elements (phonemes) for any trial
days_per_batch: 4 # number of randomly-selected days to include in each batch
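
The hunk only shows the dataset: block, so as a minimal sketch (the file name train_config.yaml and PyYAML loading are assumptions, not details from this commit), the new value can be read back and checked against days_per_batch, since each batch draws trials from that many randomly selected days:

import yaml  # PyYAML; YAML assumed from the "key: value # comment" syntax above

with open("train_config.yaml") as f:  # hypothetical file name
    cfg = yaml.safe_load(f)

ds = cfg["dataset"]
batch_size = ds["batch_size"]          # 1024 after this commit
days_per_batch = ds["days_per_batch"]  # 4

# Each batch mixes trials from days_per_batch randomly selected days,
# so every selected day contributes roughly batch_size / days_per_batch trials.
assert batch_size % days_per_batch == 0, "batch size should split evenly across days"
print(batch_size // days_per_batch, "trials per day per batch")

At batch_size: 1024 with days_per_batch: 4 this gives 256 trials per day per batch, a 32x jump over the 8 per day implied by the old batch_size: 32.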