
My code is as follows. I am trying to write my own RNN cell in TensorFlow, but it does not work:

import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.python.ops.rnn_cell_impl import _zero_state_tensors

class CapRNNcell(tf.contrib.rnn.RNNCell):
    def __init__(self, input_dim):
        self.input_dim = input_dim

    @property
    def state_size(self):
        return 1

    @property
    def output_size(self):
        return 1

    def call(self, inputs, state):
        W = weight_variable([self.input_dim, 1])
        b = bias_variable([1])

        output = state * tf.nn.sigmoid(tf.matmul(inputs, W) + b)

        # shape of output = [batch_size, 1]
        return output, output

def CapRnnModel(timeSeries_before_forgetting_gate , init_cap): 
    cell = CapRNNcell(input_dim=3) 
    cap_series, final_cap = tf.nn.dynamic_rnn(cell=cell , inputs=timeSeries_before_forgetting_gate, initial_state=init_cap) 

    return cap_series , final_cap 

timeSeries_before_forgetting_gate:

shape = [batch_size , truncated_length , self.cell_state_dim] 

init_cap : shape = [batch_size , 1] 

cap_series : shape=[batch_size , truncated_length , 1]

final_cap : shape=[batch_size , 1] 

x_place=tf.placeholder(tf.float32 , [1,2,3]) 
init_cap_place=tf.placeholder(tf.float32 , [1,1]) 
y=CapRnnModel(x_place,init_cap_place) 

with tf.Session() as sess: 
    sess.run(tf.initialize_all_variables()) 
    a=np.random.rand(1,2,3) 
    b=np.random.rand(1,1) 
    result=sess.run(y,feed_dict={x_place:a , init_cap_place:b}) 
    print(result) 

I am trying to write my own RNN cell and apply it to tf.nn.dynamic_rnn. So I created my own cell class (a subclass of tf.contrib.rnn.RNNCell) and ran a simple forward-pass test. However, it does not work. The error is as follows:

Traceback (most recent call last): 
    File "D:/MyDocuments/PycharmProjects/RNN_tutorial/customizedRNNcellTest.py", line 85, in <module> 
    y=CapRnnModel(x_place,init_cap_place) 
    File "D:/MyDocuments/PycharmProjects/RNN_tutorial/customizedRNNcellTest.py", line 76, in CapRnnModel 
    cap_series, final_cap = tf.nn.dynamic_rnn(cell=cell , inputs=timeSeries_before_forgetting_gate, initial_state=init_cap) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\rnn.py", line 574, in dynamic_rnn 
    dtype=dtype) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\rnn.py", line 737, in _dynamic_rnn_loop 
    swap_memory=swap_memory) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2770, in while_loop 
    result = context.BuildLoop(cond, body, loop_vars, shape_invariants) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2599, in BuildLoop 
    pred, body, original_loop_vars, loop_vars, shape_invariants) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\control_flow_ops.py", line 2549, in _BuildLoop 
    body_result = body(*packed_vars_for_body) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\rnn.py", line 722, in _time_step 
    (output, new_state) = call_cell() 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\rnn.py", line 708, in <lambda> 
    call_cell = lambda: cell(input_t, state) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py", line 180, in __call__ 
    return super(RNNCell, self).__call__(inputs, state) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\layers\base.py", line 414, in __call__ 
    self._set_scope(kwargs.pop('scope', None)) 
    File "C:\Users\MINHO KIM\Anaconda3\lib\site-packages\tensorflow\python\layers\base.py", line 335, in _set_scope 
    if self._scope is None: 
AttributeError: 'CapRNNcell' object has no attribute '_scope' 

Process finished with exit code 1 

What is going wrong? :(


Improved formatting: removed excessive blank lines, re-indented the code – phd

Answer


I assume that W=weight_variable([self.input_dim , 1]) and b=bias_variable([1]) define the weights and bias of the model, and that call performs the forward pass. In your case, you are trying to create a new set of parameters on every forward pass. I moved the variable definitions into the constructor. Here you can see the running version (I have tensorflow 1.2.1):

import numpy as np 
import tensorflow as tf 
import matplotlib.pyplot as plt 
from tensorflow.python.ops.rnn_cell_impl import _zero_state_tensors 

class CapRNNcell(tf.contrib.rnn.RNNCell):
    def __init__(self, input_dim):
        self.input_dim = input_dim

        self.W = tf.get_variable("W", [self.input_dim, 1], tf.float32)
        self.b = tf.get_variable("b", [1])

    @property
    def state_size(self):
        return 1

    @property
    def output_size(self):
        return 1

    def __call__(self, inputs, state):
        output = state * tf.nn.sigmoid(tf.matmul(inputs, self.W) + self.b)

        return output, output

def CapRnnModel(timeSeries_before_forgetting_gate, init_cap): 

    cap_cell = CapRNNcell(input_dim=3) 
    cap_series, final_cap = tf.nn.dynamic_rnn(cell=cap_cell, inputs=timeSeries_before_forgetting_gate, initial_state=init_cap) 

    return cap_series , final_cap 

x_place=tf.placeholder(tf.float32 , [1,2,3]) 
init_cap_place=tf.placeholder(tf.float32 , [1,1]) 

y=CapRnnModel(x_place, init_cap_place) 

with tf.Session() as sess: 
    sess.run(tf.initialize_all_variables()) 
    a=np.random.rand(1,2,3) 
    b=np.random.rand(1,1) 
    result=sess.run(y,feed_dict={x_place:a , init_cap_place:b}) 
    print(result) 

Thank you very much!!! It works!! But one more question: why did you use tf.get_variable() instead of tf.Variable()? – Eric


In this case we could equivalently use tf.Variable. I find tf.get_variable more powerful, since you can reuse the parameters. You can find the differences at https://www.tensorflow.org/programmers_guide/variable_scope – eaksan
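A minimal sketch (not from the original thread, assuming TensorFlow 1.x; the scope and variable names are only illustrative) of the reuse behavior mentioned in this comment:

import tensorflow as tf

def linear(x):
    # tf.get_variable looks the name up in the current variable scope,
    # so a second call with reuse=True returns the same parameters.
    W = tf.get_variable("W", [3, 1], tf.float32)
    b = tf.get_variable("b", [1])
    return tf.matmul(x, W) + b

x = tf.placeholder(tf.float32, [None, 3])

with tf.variable_scope("layer"):
    y1 = linear(x)   # creates layer/W and layer/b
with tf.variable_scope("layer", reuse=True):
    y2 = linear(x)   # reuses the existing layer/W and layer/b

# Only one pair of variables exists; tf.Variable would instead create
# a new pair on every call.
print([v.name for v in tf.trainable_variables()])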
