Reference: Create regressgan.py · starhou/One-dimensional-GAN@797e150 · GitHub (a GitHub example that generates one-dimensional data with a GAN)

           4. Writing the generator and discriminator models_哔哩哔哩_bilibili (a Bilibili tutorial video)

           Deep Convolutional Generative Adversarial Network  |  TensorFlow Core (official TensorFlow documentation)

Saving the code here for now. ########################

#noise_dim is the length of each latent noise vector fed to the generator; if 5, each fake sample is built from 5 random values
#n is the number of real samples drawn per call; if 100, 100 rows of real data are returned
#if fake_acc and real_acc both sit around 0.5, the discriminator can no longer tell real from fake, i.e. the generated data is reasonably good
import numpy as np
from numpy.random import rand,randn
import matplotlib.pyplot as plt
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential

def get_real_samples(n):
    #toy "real" distribution: X1 uniform in [-0.5,0.5], X2=X1^2, stacked into 6 columns
    X1=rand(n)-0.5
    X2=X1*X1
    X1=X1.reshape(n,1)
    X2=X2.reshape(n,1)
    X=np.hstack((X1,X2,X1,X1,X1,X2))
    y=np.ones((n,1))  #label 1 = real
    return X,y

def disc_model(input_dim=6):
    #discriminator: maps a 6-dimensional sample to a real/fake probability
    model=Sequential()
    model.add(Dense(25,activation='relu',input_dim=input_dim))
    model.add(Dense(1,activation='sigmoid'))
    model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
    return model

def noise_points(noise_dim,n):
    #draw n latent noise vectors, each of length noise_dim
    noise=randn(n*noise_dim)
    noise=noise.reshape(n,noise_dim)
    return noise

def get_fake_samples(gen,noise_dim,n):
    x_input=noise_points(noise_dim,n)
    X=gen.predict(x_input)
    y=np.zeros((n,1))  #label 0 = fake
    return X,y

def gen_model(input_dim,output_dim=6):
    #generator: maps a noise vector to a 6-dimensional fake sample (trained only through the combined GAN model)
    model=Sequential()
    model.add(Dense(15,activation='relu',input_dim=input_dim))
    model.add(Dense(output_dim,activation='linear'))
    return model

def gan_model(disc,gen):
    #combined model for training the generator; the discriminator's weights are frozen here
    disc.trainable=False
    model=Sequential()
    model.add(gen)
    model.add(disc)
    model.compile(loss='binary_crossentropy',optimizer='adam')
    return model

def train(g_model,d_model,gan_model,noise_dim,epochs=30000,batch_size=256,n_eval=2000):
    half_batch=batch_size//2
    for i in range(epochs):
        #update the discriminator on half a batch of real and half a batch of fake samples
        x_real,y_real=get_real_samples(half_batch)
        x_fake,y_fake=get_fake_samples(g_model,noise_dim,half_batch)
        d_model.train_on_batch(x_real,y_real)
        d_model.train_on_batch(x_fake,y_fake)
        #update the generator through the combined model, labelling its output as "real"
        x_gan=noise_points(noise_dim,batch_size)
        y=np.ones((batch_size,1))
        gan_model.train_on_batch(x_gan,y)
        if (i+1)%n_eval == 0:
            show_performance(i+1,g_model,d_model,noise_dim)
            
def show_performance(epoch,g_model,d_model,noise_dim,n=100):
    #report discriminator accuracy on n real and n fake samples, then plot the first two columns (real in red, fake in blue)
    x_real,y_real=get_real_samples(n)
    _,real_acc=d_model.evaluate(x_real,y_real,verbose=0)
    x_fake,y_fake=get_fake_samples(g_model,noise_dim,n)
    _,fake_acc=d_model.evaluate(x_fake,y_fake,verbose=0)
    print(epoch,real_acc,fake_acc)
    plt.figure(figsize=(5,5))
    plt.scatter(x_real[:,0],x_real[:,1],color='red')
    plt.scatter(x_fake[:,0],x_fake[:,1],color='blue')
    plt.show()
    
noise_dim=5
gen=gen_model(noise_dim)    
disc=disc_model()
gan=gan_model(disc,gen)
train(gen,disc,gan,noise_dim)
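
After the toy run finishes, the trained generator can be kept around and sampled from directly. A minimal sketch of that (the file name toy_generator.h5 is just a placeholder, not part of the original code):

#save the trained toy generator and draw 200 fresh fake samples from it
gen.save('toy_generator.h5')
new_noise=noise_points(noise_dim,200)
new_samples=gen.predict(new_noise)
#visual check: column 1 should trace roughly (column 0)^2
plt.figure(figsize=(5,5))
plt.scatter(new_samples[:,0],new_samples[:,1],color='blue')
plt.show()

The second version below keeps the same structure but replaces the toy parabola with 14-dimensional real data read from an Excel file, and logs the discriminator accuracies to a text file instead of plotting.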
    
import numpy as np
from numpy.random import rand,randn
import matplotlib.pyplot as plt
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential
import pandas as pd
 
#read the Excel data (14 feature columns) once at module level instead of on every batch
real_data=pd.read_excel('D://graduate.xlsx').values

def get_real_samples(n):
    #sample n real rows with replacement so the batch size always matches n
    idx=np.random.randint(0,real_data.shape[0],n)
    y=np.ones((n,1))
    return real_data[idx],y

def disc_model(input_dim=14):
    model=Sequential()
    model.add(Dense(28,activation='relu',input_dim=input_dim))
    model.add(Dense(1,activation='sigmoid'))
    model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
    return model
 
def noise_points(noise_dim,n):
    noise=randn(n*noise_dim)
    noise=noise.reshape(n,noise_dim)
    return noise
 
def get_fake_samples(gen,noise_dim,n):
    x_input=noise_points(noise_dim,n)
    X=gen.predict(x_input)
    y=np.zeros((n,1))
    return X,y
 
def gen_model(input_dim,output_dim=14):
    model=Sequential()
    model.add(Dense(23,activation='sigmoid',input_dim=input_dim))
    model.add(Dense(output_dim,activation='linear'))
    return model
 
def gan_model(disc,gen):
    disc.trainable=False
    model=Sequential()
    model.add(gen)
    model.add(disc)
    model.compile(loss='binary_crossentropy',optimizer='adam')
    return model
 
def train(g_model,d_model,gan_model,noise_dim,epochs=200000,batch_size=114,n_eval=100):
    half_batch=batch_size//2
    for i in range(epochs):
        x_real,y_real=get_real_samples(half_batch)
        x_fake,y_fake=get_fake_samples(g_model,noise_dim,half_batch)
        d_model.train_on_batch(x_real,y_real)
        d_model.train_on_batch(x_fake,y_fake)
        x_gan=noise_points(noise_dim,batch_size)
        y=np.ones((batch_size,1))
        gan_model.train_on_batch(x_gan,y)
        if (i+1)%n_eval == 0:
            show_performance(i+1,g_model,d_model,noise_dim)
            #dump the current batch of generated samples, named after the iteration index
            fname=str(i)+'.txt'
            np.savetxt(fname,x_fake)

def show_performance(epoch,g_model,d_model,noise_dim,n=57):
    #evaluate discriminator accuracy on n real and n fake samples and append the result to the global log b
    x_real,y_real=get_real_samples(n)
    _,real_acc=d_model.evaluate(x_real,y_real,verbose=0)
    x_fake,y_fake=get_fake_samples(g_model,noise_dim,n)
    _,fake_acc=d_model.evaluate(x_fake,y_fake,verbose=0)
    a=[epoch,real_acc,fake_acc]
    print(a)
    b.append(a)
    np.savetxt('Result.txt',b)  #rewrite the running log (columns: epoch, real_acc, fake_acc)
#    plt.figure(figsize=(5,5))
#    plt.scatter(x_real[:,0],x_real[:,1],color='red')
#    plt.scatter(x_fake[:,0],x_fake[:,1],color='blue')
b=[]  #global log of [epoch, real_acc, fake_acc] rows
noise_dim=14
gen=gen_model(noise_dim)    
disc=disc_model()
gan=gan_model(disc,gen)
train(gen,disc,gan,noise_dim) 
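
Once a few evaluations have run, the two kinds of text output can be read back with np.loadtxt for inspection. A small sketch, assuming the default n_eval=100 so the first sample dump is 99.txt:

#plot the logged discriminator accuracies (columns: epoch, real_acc, fake_acc)
log=np.loadtxt('Result.txt').reshape(-1,3)
plt.plot(log[:,0],log[:,1],label='real_acc')
plt.plot(log[:,0],log[:,2],label='fake_acc')
plt.legend()
plt.show()
#reload one dump of generated samples: 57 rows x 14 columns
samples=np.loadtxt('99.txt')
print(samples.shape)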
