When I run this .py file, I get the error "SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape" on the line that reads the data text file. Please help me fix this so the script runs successfully and produces the full result.
-Code example(analysis_neuralnetwork2.py)
# Standard library
import warnings

# Third-party
import matplotlib  # needed for the rcParams font configuration below
import numpy as np
from bayes_opt import BayesianOptimization
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.wrappers.scikit_learn import KerasRegressor
from keras.optimizers import SGD
from sklearn.model_selection import cross_val_score, cross_validate

warnings.filterwarnings("ignore")
# Plot font configuration (requires `import matplotlib` at the top of the file).
matplotlib.rcParams['font.sans-serif'] = "Arial"
matplotlib.rcParams['font.family'] = "sans-serif"

# Column labels for the 18 model parameters and the 8 phenotype measurements.
parameter_label = ['GNa', 'GNaB', 'pCa', 'GCaB', 'GtoFast', 'GK1_', 'GKr_', 'GKs_', 'Gkur',
                   'gkp', 'GClCa', 'GClB', 'IbarNaK', 'IbarNCX', 'ks', 'Kleak', 'Vmax_SRCaP', 'IbarSLCaP']
phenotype_label = ['RMP (mV)', 'Vmax (V/s)', 'APD at PCL=1000 (ms)', 'APD at PCL=600 (ms)',
                   'RP (ms)', 'APD alternans threshold (ms)', 'Ca alternans threshold (ms)', 'Smax']
# Fine-tuned baseline scaling factor for each of the 18 parameters.
finetuned = np.array([1.1232841, 0.8867776, 0.9441124, 0.7631924, 0.7377310, 1.7002259,
                      1.0733276, 0.9908650, 0.7769591, 0.8579676, 1.0879098, 0.8795566,
                      1.0779592, 0.9726858, 1.1786529, 1.1667818, 1.2625186, 0.9541786])

# Import data.
# FIX: the path is a raw string (r'...'). In a normal string literal the
# sequence \U starts an 8-digit unicode escape, so 'C:\Users\...' raises
# "SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes" —
# the error reported above. Doubling the backslashes ('C:\\Users\\...') or
# using forward slashes ('C:/Users/...') would also work.
data = np.array(np.genfromtxt(
    r'C:\Users\sanghyeoke\Documents\Downloads\Grandi_sampling_finetuned_1.txt',
    delimiter='\t', encoding='utf-8', missing_values='', skip_header=1))
n_sample = len(data)
n_parameter = 18
n_phenotype = 8
parameter = data[:, 1:(n_parameter + 1)]   # columns 1..18: model parameters
phenotype = data[:, (n_parameter + 1):]    # columns 19..26: phenotypes
# Express each parameter relative to its fine-tuned baseline.
parameter = parameter / np.tile(finetuned, (n_sample, 1))

# Remove outliers.
noCaAlternans = phenotype[:, 6] < 1000   # Ca alternans threshold under 1000 ms
RPgreater50 = phenotype[:, 4] > 50       # refractory period above 50 ms
keep = np.logical_and(noCaAlternans, RPgreater50)  # computed once, used for both arrays
parameter = parameter[keep, :]
phenotype = phenotype[keep, :]
isOutlier = np.full(len(parameter), False, dtype=bool)
for i in range(n_phenotype):
    # Smax (column 7) is screened in log10 space; the others in linear space.
    phenotype_i = np.log10(phenotype[:, i]) if i == 7 else phenotype[:, i]
    m, sd = np.mean(phenotype_i), np.std(phenotype_i)
    # Flag samples more than two standard deviations from the mean
    # (vectorized; replaces the per-element Python list comprehension).
    isOutlier_ = np.abs(phenotype_i - m) > 2 * sd
    isOutlier = np.logical_or(isOutlier, isOutlier_)
parameter = parameter[~isOutlier, :]
phenotype = phenotype[~isOutlier, :]
parameter_log2 = np.log2(parameter)
Smax_log10 = np.log10(phenotype[:, 7])
n_sample = len(parameter)
print('# of samples:', n_sample)

# Re-scale variables into (0,1)
parameter_log2 = (parameter_log2 + 1) * 0.5
Smax_log10 = (Smax_log10 - min(Smax_log10)) / (max(Smax_log10) - min(Smax_log10))

# Smax: MLP (neural network)
np.random.seed(0) # for reproducibility
def r_square(y_true, y_pred):
    """Coefficient of determination (R^2) metric for Keras, computed on tensors."""
    from keras import backend as K
    residual_ss = K.sum(K.square(y_true - y_pred))
    total_ss = K.sum(K.square(y_true - K.mean(y_true)))
    # K.epsilon() guards against division by zero when y_true is constant.
    return 1 - residual_ss / (total_ss + K.epsilon())
def neuralnetwork_model(n1, n2, dropout_rate, learning_rate):
    """Return a zero-argument model builder for KerasRegressor's ``build_fn``.

    The returned closure builds a 2-hidden-layer MLP regressor with ``n1`` and
    ``n2`` ReLU units, dropout after each hidden layer, and a single linear
    output unit, compiled with SGD at the given learning rate.
    """
    def bm():
        net = Sequential()
        net.add(Dense(n1, input_dim=n_parameter, kernel_initializer='normal', activation='relu'))
        net.add(Dropout(dropout_rate))
        net.add(Dense(n2, kernel_initializer='normal', activation='relu'))
        net.add(Dropout(dropout_rate))
        # Single linear output unit: regression head.
        net.add(Dense(1, kernel_initializer='normal', activation='linear'))
        net.compile(loss='mse', optimizer=SGD(lr=learning_rate), metrics=[r_square])
        return net
    return bm
def mlmodel_cv(n1, n2, dropout_rate, learning_rate, batch_size, epochs):
    """Objective for Bayesian optimization: mean 4-fold cross-validated R^2.

    Uses the file-level ``parameter_log2`` / ``Smax_log10`` arrays as the
    training data.
    """
    # The optimizer proposes floats; round the integer-valued hyperparameters.
    n1 = int(round(n1))
    n2 = int(round(n2))
    batch_size = int(round(batch_size))
    epochs = int(round(epochs))
    regressor = KerasRegressor(
        build_fn=neuralnetwork_model(n1=n1, n2=n2,
                                     dropout_rate=dropout_rate,
                                     learning_rate=learning_rate),
        batch_size=batch_size, epochs=epochs, verbose=0)
    scores = cross_val_score(regressor, parameter_log2, Smax_log10, scoring='r2', cv=4)
    return scores.mean()
# Bayesian optimization of the MLP hyperparameters, maximizing the mean
# 4-fold cross-validated R^2 returned by mlmodel_cv.
optimizer = BayesianOptimization(
f = mlmodel_cv,
pbounds = {
'n1': (1, 100), # int: hidden layer 1 units (rounded inside mlmodel_cv)
'n2': (1, 100), # int: hidden layer 2 units (rounded inside mlmodel_cv)
'dropout_rate': (0.0, 0.5),
'learning_rate': (0.001, 0.5),
'batch_size': (5, 18), # int: rounded inside mlmodel_cv
'epochs': (50, 500) # int: rounded inside mlmodel_cv
},
random_state = 0
)
# 10 random probes, then 100 guided iterations; prints the best point found.
optimizer.maximize(init_points=10, n_iter=100)
print(optimizer.max)
-Error example
File "<ipython-input-42-fe85d14ebe0f>", line 25
data = np.array(np.genfromtxt('C:\Users\sanghyeoke\Documents\Downloads\Grandi_sampling_finetuned1.txt',
delimiter='\t',missing_values='', skip_header=1)) ^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3:truncated\UXXXXXXXX escape
Data file(Grandi_sampling_finetuned.txt)
#Idx GNa GNaB pCa GCaB GtoFast GK1_ GKr_ GKs_ Gkur gkp GClCa GClB IbarNaK IbarNCX ks Kleak Vmax_SRCaP IbarSLCaP RMP Vmax APD_1000ms APD_600ms RP APD_alternans_CL Ca_alternans_CL Smax
1 5.685624e-01 1.370670e+00 9.843815e-01 5.219677e-01 1.264108e+00 2.614267e+00 1.126691e+00 5.014469e-01 9.245995e-01 1.516378e+00 1.577079e+00 9.261715e-01 6.217115e-01 1.425196e+00 1.661312e+00 8.530741e-01 8.960570e-01 1.476508e+00 -7.854786e+01 1.990175e+02 2.429400e+02 2.517600e+02 1.900000e+02 1.900000e+02 2.100000e+02 1.124607e+01
2 1.137125e+00 6.853349e-01 4.921907e-01 1.043935e+00 6.320539e-01 1.307133e+00 5.633456e-01 1.002894e+00 4.622997e-01 7.581890e-01 7.885395e-01 4.630858e-01 1.243423e+00 7.125982e-01 8.306559e-01 1.706148e+00 1.792114e+00 7.382540e-01 -7.662217e+01 2.584475e+02 3.525000e+02 3.887500e+02 2.800000e+02 3.000000e+02 3.000000e+02 8.342865e+00
5 6.761385e-01 8.150051e-01 8.277628e-01 1.241455e+00 5.314919e-01 3.108904e+00 1.894861e+00 8.433298e-01 5.497701e-01 6.375584e-01 9.377368e-01 7.788143e-01 2.091180e+00 1.694854e+00 1.975644e+00 7.173470e-01 1.506982e+00 8.779370e-01 -7.993083e+01 2.710909e+02 2.433000e+02 2.437000e+02 2.100000e+02 2.200000e+02 2.300000e+02 6.461208e+00
25 6.474724e-01 1.103725e+00 7.926684e-01 5.944109e-01 9.334337e-01 1.930409e+00 5.394616e-01 1.615151e+00 6.827364e-01 8.634170e-01 1.384874e+00 6.838971e-01 1.544155e+00 1.769891e+00 1.124920e+00 1.781686e+00 1.020420e+00 7.709393e-01 -7.848863e+01 2.231171e+02 2.854700e+02 3.224200e+02 2.400000e+02 2.500000e+02 1.000000e+03 1.456483e+01
27 9.156643e-01 7.804516e-01 5.605012e-01 8.406240e-01 1.320075e+00 1.365005e+00 7.629139e-01 5.710420e-01 9.655350e-01 1.221056e+00 9.792540e-01 4.835883e-01 5.459411e-01 1.251502e+00 1.590878e+00 1.259842e+00 7.215456e-01 1.090273e+00 -7.621383e+01 2.077098e+02 2.286200e+02 2.643400e+02 2.600000e+02 2.700000e+02 2.800000e+02 6.709694e+00
36 1.643351e+00 7.313485e-01 1.249230e+00 1.214854e+00 1.348981e+00 1.456652e+00 2.022083e+00 1.218764e+00 6.976863e-01 1.689831e+00 1.611613e+00 9.063256e-01 1.385654e+00 1.075428e+00 1.149553e+00 1.344428e+00 1.412169e+00 1.324951e+00 -7.535797e+01 2.785459e+02 2.890400e+02 3.203600e+02 1.500000e+02 1.600000e+02 1.600000e+02 1.187817e+01
39 9.771423e-01 8.697249e-01 1.485593e+00 1.021566e+00 1.134353e+00 2.449787e+00 6.011688e-01 7.246814e-01 4.148468e-01 1.420973e+00 1.916541e+00 1.524252e+00 1.647830e+00 6.394533e-01 1.367056e+00 1.130525e+00 2.374975e+00 1.114147e+00 -7.752651e+01 2.704563e+02 2.103900e+02 2.093000e+02 5.000000e+01 5.000000e+01 9.000000e+01 1.216595e+01
54 1.443069e+00 1.177830e+00 9.224479e-01 1.163348e+00 7.043525e-01 2.558248e+00 6.846042e-01 7.567658e-01 6.126566e-01 1.247794e+00 1.682965e+00 1.338485e+00 6.634535e-01 7.282021e-01 8.488449e-01 1.985487e+00 2.480124e+00 1.645401e+00 -7.632445e+01 2.944793e+02 2.162000e+02 2.192300e+02 1.300000e+02 1.400000e+02 1.500000e+02 2.412609e+00
59 9.357147e-01 1.080075e+00 1.691777e+00 5.333973e-01 4.567161e-01 2.345924e+00 2.111608e+00 1.962808e+00 4.724228e-01 4.810902e-01 6.488716e-01 1.459629e+00 2.046370e+00 6.677643e-01 7.783942e-01 1.287429e+00 6.761487e-01 1.268778e+00 -7.788357e+01 2.744703e+02 2.155900e+02 2.332900e+02 1.100000e+02 1.200000e+02 1.200000e+02 1.108297e+01
60 1.871429e+00 5.400373e-01 8.458885e-01 1.066795e+00 9.134322e-01 1.172962e+00 1.055804e+00 9.814038e-01 9.448456e-01 9.621804e-01 1.297743e+00 7.298143e-01 1.023185e+00 1.335529e+00 1.556788e+00 6.437146e-01 1.352297e+00 6.343888e-01 -7.508160e+01 2.838593e+02 3.388300e+02 3.817000e+02 2.100000e+02 2.200000e+02 2.300000e+02 1.416615e+01