
Commit

Revert "Use myReshape instead of reshape or lapply to create the batc…
Browse files Browse the repository at this point in the history
…hes. (faster than original, but slower than lapply)"

This reverts commit 19c5e58.
caewok committed Jan 30, 2013
1 parent 19c5e58 commit 21d886d
Showing 2 changed files with 18 additions and 10 deletions.
12 changes: 10 additions & 2 deletions Neural Net Language Model/LoadData.R
@@ -33,8 +33,16 @@ load_data <- function(N) {
 
 # shift to an list of M minibatches, each with D*N
 # looks like we threw out the remainder training data
-train_input <- myReshape(data$trainData[1:D, 1:(N*M), drop=F], D, N, M)
-train_target <- myReshape(data$trainData[D + 1, 1:(N*M), drop=F], 1, N, M)
+start <- seq.int(1, N*M, by=1000)
+end <- seq.int(1000, N*M, by=1000)
+train_input <- mapply(function(x, start, end) x[,start:end],
+                      start=seq.int(1, N*M, by=1000),
+                      end=seq.int(1000, N*M, by=1000),
+                      MoreArgs=list(x=data$trainData[1:D, 1:(N*M)]), SIMPLIFY=F)
+train_target <- mapply(function(x, start, end) x[,start:end],
+                       start=seq.int(1, N*M, by=1000),
+                       end=seq.int(1000, N*M, by=1000),
+                       MoreArgs=list(x=data$trainData[D + 1, 1:(N*M), drop=F]), SIMPLIFY=F)
 valid_input <- data$validData[1:D,, drop=F]
 valid_target <- data$validData[D + 1, , drop=F]
 test_input <- data$validData[1:D, , drop=F]
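The restored LoadData.R code splits the (D+1) x (N*M) training matrix into a list of M column-wise minibatches rather than the D x N x M array that myReshape produced; note that the recorded diff hardcodes a stride of 1000, so it implicitly assumes N == 1000. Below is a minimal sketch, not code from the repository, of the same list-of-minibatches idea written with lapply (the variant the commit message calls fastest); the helper name make_batches and the toy dimensions are illustrative only.

# Sketch (assumption: trainData is a (D+1) x (N*M) matrix of word indices
# and N*M does not exceed the number of columns).
make_batches <- function(trainData, D, N, M) {
  lapply(seq_len(M), function(m) {
    cols <- ((m - 1) * N + 1):(m * N)
    list(input  = trainData[1:D, cols, drop = FALSE],    # D x N predictor words
         target = trainData[D + 1, cols, drop = FALSE])  # 1 x N target words
  })
}

# Toy example: D = 3 context words, N = 4 cases per batch, M = 2 batches.
toy <- matrix(sample(1:250, 4 * 8, replace = TRUE), nrow = 4)
batches <- make_batches(toy, D = 3, N = 4, M = 2)
str(batches[[1]])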
16 changes: 8 additions & 8 deletions Neural Net Language Model/Train.R
@@ -38,11 +38,11 @@ train <- function(epochs) {
 
 # % LOAD DATA.
 data <- load_data(batchsize)
-tmp <- size(data$train_input)
+tmp <- size(data$train_input[[1]])
 numwords <- tmp[1]
 #batchsize <- tmp[2]
-numbatches <- tmp[3]
-#numbatches <- length(data$train_input)
+#numbatches <- tmp[3]
+numbatches <- length(data$train_input)
 vocab_size <- size(data$vocab, 2)
 
 word_embedding_weights = init_wt * randn(vocab_size, numhid1);
@@ -75,14 +75,14 @@ train <- function(epochs) {
 #while(hasNext(inputIT) & hasNext(targetIT)) {
 
 
-input_batch <- data$train_input[,,m]
-target_batch <- data$train_target[,,m]
+#input_batch <- data$train_input[,,m]
+#target_batch <- data$train_target[,,m]
 #input_batch <- nextElem(inputIT)
 #target_batch <- nextElem(targetIT)
-#input_batch <- data$train_input[[m]]
-#target_batch <- data$train_target[[m]]
+input_batch <- data$train_input[[m]]
+target_batch <- data$train_target[[m]]
 
-#dim(target_batch) <- NULL
+dim(target_batch) <- NULL
 
 # % FORWARD PROPAGATE.
 # % Compute the state of each layer in the network given the input batch and all weights and biases
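With the revert, Train.R counts batches with length() and pulls each minibatch out of the list with [[m]] before flattening the target. Below is a minimal sketch, not the repository's Train.R, of how a loop consumes those list-based batches; the function name run_batches is hypothetical and the forward/backward steps are placeholder comments.

run_batches <- function(data) {
  numbatches <- length(data$train_input)     # one list entry per minibatch
  for (m in seq_len(numbatches)) {
    input_batch  <- data$train_input[[m]]    # D x N matrix of word indices
    target_batch <- data$train_target[[m]]   # targets for minibatch m
    dim(target_batch) <- NULL                # drop dims so it indexes as a plain vector
                                             # (a no-op if it is already a vector)
    # ... forward propagate, compute cross-entropy error, backpropagate ...
  }
}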
