TensorFlow——keras--报错+修改

    技术2026-01-16  7

    1

    mnist.load_data() 报错解决

    #导入数据集 fashion_mnist = keras.datasets.fashion_mnist (x_train_all, y_train_all),(x_test, y_test) = fashion_mnist.load_data()

    2

    Tensorflow2.0报错:ProfilerNotRunningError: Cannot stop profiling. No profiler is running.

    #TensorBoard(文件夹), EarlyStopping, ModelCheckpoint(文件名) #logdir = './callbacks' #定义文件夹 logdir = os.path.join("callbacks") if not os.path.exists(logdir): os.mkdir(logdir) #如果没有该文件,创建文件 output_model_file = os.path.join(logdir, "fashion_mnist_model.h5") #输出的文件也放在该文件夹下 callbacks = [ keras.callbacks.TensorBoard(logdir), keras.callbacks.ModelCheckpoint(output_model_file, save_best_only = True), #保存最好的模型,否则最近的模型 keras.callbacks.EarlyStopping(patience=5, min_delta=1e-3), ] # EarlyStopping的参数: # monitor:关注的指标,一般是目标函数的值 # min_delta:预值,如果比预值低,就停 # patience:当前后两次目标函数的值比min_delta小多少次时,关掉 history = model.fit(x_train_scaled, y_train, epochs=10, validation_data=(x_valid_scaled, y_valid), callbacks = callbacks) # epochs: 迭代10次 ; validation:验证

    修改

    #logdir = './callbacks' #定义文件夹 logdir = os.path.join("callbacks")

    3

    报错:AssertionError: Bad argument number for Name: 3, expecting 4 修改:

    pip install gast==0.2.2 #0.3.2版本的问题,降级后可以

    4

    ValueError: Protocol message Features has no "features" field.

    def serialize_example(x, y): """Converts x, y to tf.train.Example and serialize""" input_features = tf.train.FloatList(value = x) label = tf.train.FloatList(value = y) features = tf.train.Features( features = { "input_features": tf.train.Feature( float_list = input_features), "label": tf.train.Feature(float_list = label) } ) example = tf.train.Example(features =features) return example.SerializeToString()

    修改:把 tf.train.Features(...) 的关键字参数名由 features 改为 feature(去掉末尾的 s),即该 proto 的字段名是单数 feature:

# Corrected version: this dict must be passed to tf.train.Features via its
# `feature` (singular) keyword argument — the proto has no `features` field.
feature = {
    "input_features": tf.train.Feature( float_list = input_features),
    "label": tf.train.Feature(float_list = label)
}

    5

    报错:inflate() failed with error -3: incorrect header check [Op:IteratorGetNextSync]

# Original form — raised: inflate() failed with error -3: incorrect header
# check [Op:IteratorGetNextSync].
# NOTE(review): that error means a reader tried to GZIP-decompress data that
# is not GZIP-compressed; presumably `filename_dataset` here held the wrong
# (uncompressed / stale) files — confirm against how the TFRecords were written.
dataset = filename_dataset.interleave(
    lambda filename: tf.data.TFRecordDataset(
        filename, compression_type = "GZIP"),
    cycle_length = n_readers
)

    修改为:

# Fixed form: interleave over `dataset` (the filename dataset built earlier),
# reading each file as a GZIP-compressed TFRecord stream.
dataset = dataset.interleave(
    lambda filename: tf.data.TFRecordDataset(
        filename, compression_type = "GZIP"),
    cycle_length = n_readers
)
    Processed: 0.010, SQL: 9