nonLincausality is not running on CPU machine #5

Open

RakeshSharma21 opened this issue Jan 23, 2023 · 0 comments

Hello mrosol,

I am trying to run the following code on a CPU machine, but I am getting the error pasted below the code. Please help me resolve this error. I am using Python 3.7 and the latest version of nonlincausality.

import nonlincausality as nlc
import numpy as np

lags = [10, 20, 30]
n_obs = 53
result_of_non_causality = []
for col in columns_candidate_for_x:
    initial_list = [col]
    print(f'non-causality for {initial_list}')
    initial_list.append('Nifty_Price')
    col_nifty_df = sentiment_all_exch_rate_all_indices_normal_Df[initial_list]
    df_train, df_test = col_nifty_df[0:-n_obs], col_nifty_df[-n_obs:]
    results = nlc.nonlincausalityGRU(x=np.array(df_train), maxlag=lags, GRU_layers=2, GRU_neurons=[25,25], Dense_layers=2, Dense_neurons=[100, 100], x_test=np.array(df_test), run=2, add_Dropout=True, Dropout_rate=0.01, epochs_num=[100], learning_rate=[0.001], batch_size_num=128, verbose=False, plot=False)
    for lag in lags:
        single_record = {}
        single_record['X_value'] = col

        p_value = results[lag].p_value
        test_statistic = results[lag].test_statistic

        best_errors_X = results[lag].best_errors_X
        best_errors_XY = results[lag].best_errors_XY
        cohens_d = np.abs(
            (np.mean(np.abs(best_errors_X)) - np.mean(np.abs(best_errors_XY)))
            / np.std([best_errors_X, best_errors_XY])
        )
        single_record['lag'] = lag
        single_record['test_statistic'] = test_statistic
        single_record['p_value'] = p_value
        single_record['cohens_d'] = cohens_d
        result_of_non_causality.append(single_record)

    del results
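
In case it helps to reproduce without my dataset, here is a minimal sketch that calls nonlincausalityGRU with the same parameters on synthetic data (I have not verified whether the error depends on the data itself; the column names and DataFrame above come from my own dataset):

    # Minimal reproduction sketch with synthetic data.
    # Assumption: the same nonlincausalityGRU call as above triggers the
    # error regardless of the actual data values.
    import numpy as np
    import nonlincausality as nlc

    rng = np.random.default_rng(0)
    data = rng.standard_normal((500, 2))   # two synthetic signals: [effect, cause]
    df_train, df_test = data[:-53], data[-53:]

    results = nlc.nonlincausalityGRU(
        x=df_train,
        maxlag=[10],
        GRU_layers=2,
        GRU_neurons=[25, 25],
        Dense_layers=2,
        Dense_neurons=[100, 100],
        x_test=df_test,
        run=2,
        add_Dropout=True,
        Dropout_rate=0.01,
        epochs_num=[100],
        learning_rate=[0.001],
        batch_size_num=128,
        verbose=False,
        plot=False,
    )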


KeyError Traceback (most recent call last)
~\AppData\Local\Temp\ipykernel_22704\273327593.py in
10 col_nifty_df=sentiment_all_exch_rate_all_indices_normal_Df[initial_list]
11 df_train, df_test = col_nifty_df[0:-n_obs], col_nifty_df[-n_obs:]
---> 12 results = nlc.nonlincausalityGRU(x=np.array(df_train), maxlag=lags, GRU_layers=2, GRU_neurons=[25,25], Dense_layers=2, Dense_neurons=[100, 100], x_test=np.array(df_test), run=2, add_Dropout=True, Dropout_rate=0.01, epochs_num=[100], learning_rate=[0.001], batch_size_num=128, verbose=False, plot=False)
13 for lag in lags:
14 single_record={}

~\Anaconda3\envs\thesispy37\lib\site-packages\nonlincausality\nonlincausality.py in nonlincausalityGRU(x, maxlag, GRU_layers, GRU_neurons, Dense_layers, Dense_neurons, x_test, run, z, z_test, add_Dropout, Dropout_rate, epochs_num, learning_rate, batch_size_num, regularization, reg_alpha, callbacks, verbose, plot)
713 input_layer = Input((data_shape[1], data_shape[2]))
714
--> 715 layers_dense = Dense(Dense_neurons[0], activation="relu")(input_layer)
716 # Adding Dropout
717 if add_Dropout:

~\Anaconda3\envs\thesispy37\lib\site-packages\nonlincausality\nonlincausality.py in run_nonlincausality(network_architecture, x, maxlag, Network_layers, Network_neurons, Dense_layers, Dense_neurons, x_test, run, z, z_test, add_Dropout, Dropout_rate, epochs_num, learning_rate, batch_size_num, regularization, reg_alpha, callbacks, verbose, plot, functin_type)
258 # Appending RSS, models, history of training and prediction errors to results object
259 result_lag.append_results(
--> 260 sum(error_X ** 2),
261 sum(error_XY ** 2),
262 model_X,

~\Anaconda3\envs\thesispy37\lib\site-packages\keras\utils\traceback_utils.py in error_handler(*args, **kwargs)
68 # To get the full stack trace, call:
69 # tf.debugging.disable_traceback_filtering()
---> 70 raise e.with_traceback(filtered_tb) from None
71 finally:
72 del filtered_tb

~\Anaconda3\envs\thesispy37\lib\site-packages\keras\engine\training.py in tf__train_function(iterator)
13 try:
14 do_return = True
---> 15 retval_ = ag__.converted_call(ag__.ld(step_function), (ag__.ld(self), ag__.ld(iterator)), None, fscope)
16 except:
17 do_return = False

KeyError: in user code:

File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\engine\training.py", line 1249, in train_function  *
    return step_function(self, iterator)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\engine\training.py", line 1233, in step_function  **
    outputs = model.distribute_strategy.run(run_step, args=(data,))
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\engine\training.py", line 1222, in run_step  **
    outputs = model.train_step(data)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\engine\training.py", line 1027, in train_step
    self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 527, in minimize
    self.apply_gradients(grads_and_vars)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 1140, in apply_gradients
    return super().apply_gradients(grads_and_vars, name=name)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 634, in apply_gradients
    iteration = self._internal_apply_gradients(grads_and_vars)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 1169, in _internal_apply_gradients
    grads_and_vars,
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 1217, in _distributed_apply_gradients_fn
    var, apply_grad_to_update_var, args=(grad,), group=False
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 1213, in apply_grad_to_update_var  **
    return self._update_step(grad, var)
File "C:\Users\00006262\Anaconda3\envs\thesispy37\lib\site-packages\keras\optimizers\optimizer_experimental\optimizer.py", line 217, in _update_step
    f"The optimizer cannot recognize variable {variable.name}. "

KeyError: 'The optimizer cannot recognize variable gru_8/gru_cell_8/kernel:0. This usually means you are trying to call the optimizer to update different parts of the model separately. Please call `optimizer.build(variables)` with the full list of trainable variables before the training loop or use legacy optimizer `tf.keras.optimizers.legacy.{self.__class__.__name__}.'
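
For reference, the error message itself suggests two ways forward: calling `optimizer.build(variables)` before training, or switching to the legacy optimizer classes. The traceback goes through `keras\optimizers\optimizer_experimental\optimizer.py`, i.e. the new optimizer that became the default in TensorFlow 2.11. Since the training loop lives inside nonlincausality, the `optimizer.build(variables)` route is hard to apply here; a possible workaround (a minimal sketch, not verified, assuming nonlincausality picks up the default `tf.keras.optimizers.Adam` when it compiles its models) is to alias Adam to the legacy implementation before importing nonlincausality:

    # Hypothetical workaround sketch - not a confirmed fix.
    # Assumption: nonlincausality compiles its Keras models with the default
    # tf.keras.optimizers.Adam. In TF >= 2.11 this resolves to the new
    # experimental optimizer, which raises the KeyError above when asked to
    # update variables it was not built with. Aliasing Adam to the legacy
    # class before nonlincausality creates any models keeps the old behaviour.
    import tensorflow as tf

    if hasattr(tf.keras.optimizers, "legacy"):
        tf.keras.optimizers.Adam = tf.keras.optimizers.legacy.Adam

    import nonlincausality as nlc  # import only after the alias is in place

Alternatively, pinning TensorFlow to a version below 2.11 (for example `pip install "tensorflow<2.11"`), where the legacy optimizers are still the default, should avoid the new optimizer entirely.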