Input 0 of layer dense is incompatible with the layer: expected axis -1 to have value 602 but received shape [None, 370]

Posted 2024-05-12 22:23:46


I am trying to fix the error "ValueError: Input 0 of layer dense is incompatible with the layer: expected axis -1 of input shape to have value 602 but received input with shape [None, 370]" (more console output is shown below).

I get this error after using the createModel function to create a neural network model from a connectivity matrix and then trying to train the model on the MNIST handwritten-digit database. Can anyone help me fix this error? All of the relevant code is shown below. Any help is greatly appreciated. Thanks in advance!

Error:

(base) [patelvrajn@VArchLinux Proof_of_Concept]$ python3 Posted/main.py
Generating chromosome         1 of         5
Finished generating chromosome         1
Generating chromosome         2 of         5
Finished generating chromosome         2
Generating chromosome         3 of         5
Finished generating chromosome         3
Generating chromosome         4 of         5
Finished generating chromosome         4
Generating chromosome         5 of         5
Finished generating chromosome         5
Finished generating all chromosomes.
Generating models

...

ValueError: Input 0 of layer dense is incompatible with the layer: expected axis -1 of input shape to have value 602 but received input with shape [None, 370]
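
For reference, the same message can be reproduced on its own whenever a Dense layer that was built for one width receives a tensor with a different size on axis -1. Here is a minimal sketch with the 602/370 values taken from the traceback above (everything else is illustrative and not part of my script):

import numpy as np
from tensorflow import keras

# A model whose Dense layer is built for 602 features on axis -1.
inputs = keras.Input(shape=(602,))
outputs = keras.layers.Dense(10)(inputs)
model = keras.Model(inputs, outputs)

# Feeding data that only has 370 features should raise the same ValueError:
# "expected axis -1 of input shape to have value 602 but received input
# with shape [None, 370]".
x = np.zeros((1, 370), dtype='float32')
model.predict(x)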

The code below is my main.py:

from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.layers import Dense, Input, concatenate, Lambda
from random import *
import numpy as np
import chromosome as c
from enum import Enum

EPOCH_OR_ERROR_PROBABILITY = 50
EPOCHS_MAXIMUM = 1000 
TRAINING_SET_MINIMUM = 100
LAYERS_MAXIMUM = 4
NEURONSPERLAYER_MAXIMUM = 4
INITIAL_POPULATION_SIZE = 5

class Model_Gen_State(Enum):
    inputlayer_state = 1
    hiddenlayer_state = 2
    outputlayer_state = 3

#Generates a random integer or float - works!
def randomNumber(minimum,maximum,isFloat):
    random = 0
    if isFloat:
        random = uniform(minimum, maximum)
    else:
        random = randint(minimum, maximum)
    return random

def createModel(connectivity_matrix,activation_list):
    #initialize initial/current state to input layer state
    current_state = Model_Gen_State.inputlayer_state

    #initialize variable for storing input layer
    input_layer = keras.Input(shape=(1,))

    #initialize list for skipped neurons & next layer skipped neurons
    skipped_neurons = []
    next_layer_skipped_neurons = []

    #loop through each array in the connectivity matrix
    for array_index in range(len(connectivity_matrix)):
        #change number of neurons at current layer according to index array
        layer_n_count = len(connectivity_matrix[array_index])

        #switch states
        if(array_index == len(connectivity_matrix) - 1):
            current_state = Model_Gen_State.outputlayer_state
        elif(array_index == 0):
            current_state = Model_Gen_State.inputlayer_state
        else:
            current_state = Model_Gen_State.hiddenlayer_state

        #initialize layer for processing according to current state
        layer = keras.Input(shape=(layer_n_count,))
        if(current_state == Model_Gen_State.inputlayer_state):
            input_layer = layer
        if(current_state == Model_Gen_State.hiddenlayer_state or current_state == Model_Gen_State.outputlayer_state):
            layer = next_layer

        #initialize counter for activation list
        al_index = 0

        #initialize list for storing groups
        groups_to_concat = []

        #reassign skipped_neurons for new layer and empty new layer skipped neurons
        skipped_neurons = next_layer_skipped_neurons
        next_layer_skipped_neurons = []

        #loop through column
        for col in range(len(connectivity_matrix[array_index][0])):            
            #initialize counter rows processed in this column
            processed_rows = 0

            #initialize list for storing segments  
            segments_to_concat = []

            #loop through all the rows in the column
            while(processed_rows < layer_n_count):
                #skip the neuron if it was in skipped neurons list 
                skip = False
                for skipped_neuron in skipped_neurons:
                    if(skipped_neuron == processed_rows):
                        skipped_neurons.remove(skipped_neuron)
                        skip = True
                if(skip):
                    processed_rows = processed_rows + 1
                    continue

                #the neuron's connectivity at fixed array (input only) and fixed col with varying row
                in_neuron = connectivity_matrix[array_index][processed_rows][col]

                #check if neuron's connectivity is 0 for this row
                if(in_neuron == 0):
                    processed_rows = processed_rows + 1
                    continue

                #start at a row where neuron connectivity is 1
                start_row = processed_rows

                #loop until you find the first 0 in the column with 0 neuron connectivity
                while(in_neuron == 1):
                    #increased the number of processed rows
                    processed_rows = processed_rows + 1
                    #do not let processed rows exceed the number of rows for this array
                    if(processed_rows >= layer_n_count):
                        processed_row = processed_rows - 1
                        break
                    #reassign the current neuron being processed
                    in_neuron = connectivity_matrix[array_index][processed_rows][col]

                #set end row to the last row where neuron connectivity was 1
                end_row = processed_rows

                #create the segment where output neurons had a connection to this neuron with consecutive 1s with the connectivity matrix
                input_segment = Lambda(lambda x: x[:,start_row:(end_row +1)])(layer)

                #add to segment list
                segments_to_concat.append(input_segment)

            #check length of segment list and set group according
            if(len(segments_to_concat) == 0):
                next_layer_skipped_neurons.append(col)
                continue
            group = segments_to_concat[0]
            if(len(segments_to_concat) >= 2):
                group = tf.keras.layers.concatenate(segments_to_concat)

            #make group of segments into a dense layer
            group = Dense(1, activation= activation_list[al_index])(group)

            #increase activation list index
            al_index = al_index + 1

            #add to group list
            groups_to_concat.append(group)

        #check group list length and set next layer according
        if(len(groups_to_concat) == 0):
            return
        next_layer = groups_to_concat[0]
        if(len(groups_to_concat) >= 2):
            next_layer = tf.keras.layers.concatenate(groups_to_concat)

        if(current_state == Model_Gen_State.outputlayer_state):
            return keras.Model(input_layer,next_layer)

        #empty the group list between layers
        groups_to_concat = []

        #assert to assure neurons in connectivity matrix and neurons generated are within range of each other
        assert(next_layer.shape[1] <= len(connectivity_matrix[array_index + 1])), "Layers mysteriously appeared at initialization of model."

def downloadMNIST(model, learning_rate, momentum, lr_decay):
    #MNIST Dataset
    (x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
    x_train = x_train.reshape(60000, 784).astype('float32') / 255
    x_test = x_test.reshape(10000, 784).astype('float32') / 255
    y_train = y_train.astype('float32')
    y_test = y_test.astype('float32')
    x_val = x_train[-10000:]
    y_val = y_train[-10000:]
    x_train = x_train[:-10000]
    y_train = y_train[:-10000]
    model.compile(
            # Optimizer
            optimizer=keras.optimizers.SGD(lr=learning_rate, momentum=momentum, decay=lr_decay, nesterov=True),
            # Loss function to minimize
            loss=keras.losses.SparseCategoricalCrossentropy(),
            # List of metrics to monitor
            metrics=[keras.metrics.SparseCategoricalAccuracy()])
    history = model.fit(x_train, y_train, batch_size=64, epochs=128, validation_data=(x_val, y_val))
    results = model.evaluate(x_test, y_test, batch_size=128)
    predictions = model.predict(x_test[:3])
    return keras.datasets.mnist.load_data()

def optimizerRandomSelection():
    optimizers = [keras.optimizers.Adadelta, keras.optimizers.Adagrad, keras.optimizers.Adam, 
                 keras.optimizers.Adamax, keras.optimizers.Ftrl, keras.optimizers.Nadam, 
                 keras.optimizers.RMSprop, keras.optimizers.SGD]
    return optimizers[randomNumber(0,len(optimizers)-1, False)]

def activationfunctionRandomSelection():
    activationfunctions = [keras.activations.elu, keras.activations.exponential, keras.activations.hard_sigmoid,
                          keras.activations.linear, keras.activations.relu, keras.activations.selu,
                          keras.activations.sigmoid, keras.activations.softmax, keras.activations.softplus,
                          keras.activations.softsign, keras.activations.tanh]
    return activationfunctions[randomNumber(0,len(activationfunctions) -1, False)]

def generateConnectivityMatrix(numofInputs, numberoflayers_maximum, numberofneurons_maximum):
    connectivitymatrix = []
    currentlayerneurons = numofInputs
    for index in range(numberoflayers_maximum - 1):
        nextlayerneurons = randomNumber(1, numberofneurons_maximum, False)
        layerconnectivity = np.zeros((currentlayerneurons,nextlayerneurons))
        for cn in range(currentlayerneurons):
            for nn in range(nextlayerneurons):
                layerconnectivity[cn][nn] = randomNumber(0, 1, False)
        currentlayerneurons = nextlayerneurons
        connectivitymatrix.append(layerconnectivity)
    return connectivitymatrix

def generateActivationList(numberofneurons):
    activationlist = []
    size = 0
    while(size != numberofneurons):
        activationlist.append(activationfunctionRandomSelection())
        size = size + 1
    return activationlist

def main():
    #Initialization
    #Generate a list of random chromosomes
    chromosome_list = []
    for member_count in range(INITIAL_POPULATION_SIZE): 
        print("Generating chromosome % 9d of % 9d" %(member_count+1,INITIAL_POPULATION_SIZE))  
        cm = generateConnectivityMatrix(784, LAYERS_MAXIMUM, NEURONSPERLAYER_MAXIMUM)

        neuron_count = 0
        for cmarray_index in range(len(cm)):
            rows = len(cm[cmarray_index])
            cols = len(cm[cmarray_index][rows-1])
            if(cmarray_index == 0):
                neuron_count = neuron_count + (rows+cols)
            else:
                neuron_count = neuron_count + cols

        al = generateActivationList(neuron_count)

        o = optimizerRandomSelection()

        (x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
        ts=randomNumber(TRAINING_SET_MINIMUM,len(x_train),False)

        ee = -1
        dice_roll = randomNumber(1,100,False)
        if(dice_roll >= EPOCH_OR_ERROR_PROBABILITY):
            ee = randomNumber(1, EPOCHS_MAXIMUM, False)

        cc = c.Chromosome(cm,al,o,ee,ts)
        chromosome_list.append(cc)
        print("Finished generating chromosome % 9d" %(member_count+1))

    print("Finished generating all chromosomes.")

    #From the list create a list of neural network models
    #Map the chromosome to their respective models
    for chromosome in chromosome_list:
        print("Generating models")
        chromosome.m = createModel(chromosome.cm, chromosome.al)
        downloadMNIST(chromosome.m, 0.01, 0.9, 1e-6)

    print("Complete")

if __name__ == "__main__":
    main()
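
For what it's worth, here is a small diagnostic sketch (not part of main.py above, and it assumes createModel returned a keras.Model rather than None) that compares what a generated model expects on axis -1 with the 784-wide MNIST vectors produced in downloadMNIST:

# Hypothetical check, reusing the cm/al variables built inside main() above.
model = createModel(cm, al)
if model is not None:
    model.summary()
    print(model.input_shape)   # MNIST data is reshaped to (None, 784) in downloadMNIST
    for layer in model.layers:
        if isinstance(layer, Dense):
            # shows the width each Dense layer was built for on axis -1
            print(layer.name, layer.input_shape, "->", layer.output_shape)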

The code below is my chromosome.py:

import numpy as np

class Chromosome:

    def __init__(self, connectivity_matrix, activation_list, optimizer, epochs_or_error, training_set):
        self.cm = connectivity_matrix
        self.al = activation_list
        self.o = optimizer
        self.ee = epochs_or_error
        self.ts = training_set
        self.m = None
