
Commit 11fd4bc

remove useless logs
1 parent 6b0d9c5 commit 11fd4bc
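
The change silences ad-hoc debug output by commenting it out rather than deleting it. For comparison, a minimal sketch of the same effect with Python's standard logging module (the logger name and helper below are illustrative, not part of the deccom codebase): debug calls stay in the source but emit nothing unless the application opts in with a handler.

import logging

# Hypothetical logger name; the deccom codebase does not define one itself.
logger = logging.getLogger("deccom.streamprotocol")

def send_stream_debug(pub_key: str, data: bytes) -> None:
    # A debug call is a no-op unless a handler is configured,
    # so it need not be commented out for production runs.
    logger.debug("sending to %s, %d bytes", pub_key, len(data))

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)  # opt in while debugging
    send_stream_debug("abc", b"payload")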

File tree

2 files changed: +24 -24 lines changed


deccom/protocols/delayprotocol.py

Lines changed: 4 additions & 4 deletions
@@ -15,14 +15,14 @@ def __init__(self, delay_map, submodule=None, callback: Callable[[tuple[str, int
 
         #self.stream_callback(data,node_id,addr)
     async def send_stream(self,node_id,data, ignore_sz = 0):
-        print("delay...")
+        # print("delay...")
         p = self.get_peer(node_id)
-        print(p)
+        # print(p)
         loop = asyncio.get_event_loop()
         dl = self.delay_map(p.pub_key, self.peer.pub_key)
         sz = len(data) - ignore_sz
-        print(dl)
-        print("will send in ",dl[0]/1000 + sz/(1024**3*dl[1]))
+        # print(dl)
+        # print("will send in ",dl[0]/1000 + sz/(1024**3*dl[1]))
         await asyncio.sleep(dl[0]/1000 + sz/(1024**3*dl[1]))
         if self.started:
             return await self._lower_send_to(node_id,data)
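
The asyncio.sleep above models link delay as fixed latency plus transmission time: dl returned by delay_map is evidently a (latency in ms, bandwidth in GiB/s) pair, so the wait is dl[0]/1000 seconds plus sz/(1024**3*dl[1]) seconds for an sz-byte payload. A self-contained sketch of that arithmetic (function and parameter names are mine; the units are inferred from the code):

# Sketch of the delay model used above (units inferred from the code).
def transfer_delay(latency_ms: float, bandwidth_gib_s: float, size_bytes: int) -> float:
    """Seconds to wait: fixed latency plus size divided by bandwidth."""
    return latency_ms / 1000 + size_bytes / (1024**3 * bandwidth_gib_s)

# e.g. 50 ms latency, 1 GiB/s link, 1 MiB payload:
print(transfer_delay(50, 1.0, 2**20))  # ~0.051 s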

deccom/protocols/streamprotocol.py

Lines changed: 20 additions & 20 deletions
@@ -150,7 +150,7 @@ async def open_connection(self, remote_ip, remote_port, node_id: bytes, port_lis
 
 
         if self.connections.get(node_id) != None:
-            print("duplicate connection OPENED")
+            # print("duplicate connection OPENED")
             self.connections.get(node_id).using += 1
             return True
         loop = asyncio.get_event_loop()
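
The hunk above avoids opening a second stream to the same peer: an existing connection is reused and its using counter bumped, a plain reference-count scheme. A minimal standalone sketch of that pattern (the Connection holder and helper names here are stand-ins, not the classes from the codebase):

from dataclasses import dataclass

@dataclass
class Connection:
    writer: object        # stand-in for the asyncio StreamWriter
    using: int = 1        # how many callers share this stream

connections: dict[bytes, Connection] = {}

def acquire(node_id: bytes, conn: Connection) -> Connection:
    # Reuse the existing stream if one is already open to this peer.
    existing = connections.get(node_id)
    if existing is not None:
        existing.using += 1
        return existing
    connections[node_id] = conn
    return conn

def release(node_id: bytes) -> None:
    conn = connections.get(node_id)
    if conn is None:
        return
    conn.using -= 1
    if conn.using == 0:   # last user gone: drop the entry
        del connections[node_id]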
@@ -236,8 +236,8 @@ async def listen_for_data(self, reader: asyncio.StreamReader, node_id = None, ad
             return
         # seqrand = random.randint(1,40000)
         #print("listening for data")
-        with open(f"log{self.peer.pub_key}.txt", "a") as log:
-            log.write(f"listening for data {node_id} \n")
+        # with open(f"log{self.peer.pub_key}.txt", "a") as log:
+        #     log.write(f"listening for data {node_id} \n")
 
         try:
             data = await reader.readexactly(32)
@@ -271,9 +271,9 @@ async def listen_for_data(self, reader: asyncio.StreamReader, node_id = None, ad
         buffer = bytearray()
         i = int.from_bytes(data,byteorder="big")
         if i != 0:
-            with open(f"log{self.peer.pub_key}.txt", "a") as log:
-                log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
-                log.write(f" will from {self.get_peer(node_id).pub_key} {i} {len(data)}\n")
+            # with open(f"log{self.peer.pub_key}.txt", "a") as log:
+            #     log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
+            #     log.write(f" will from {self.get_peer(node_id).pub_key} {i} {len(data)}\n")
 
         while i > 0:
             data = await reader.read(min(i, 9048))
@@ -283,15 +283,15 @@ async def listen_for_data(self, reader: asyncio.StreamReader, node_id = None, ad
                 return
             if node_id !=None and self.connections.get(node_id) != None:
                 self.connections[node_id].fut = None
-                print("closing because received empty bytes", addr,node_id)
+                # print("closing because received empty bytes", addr,node_id)
                 self.remove_from_dict(node_id)
                 self.closed_stream(node_id, addr)
                 return
             i -= len(data)
             buffer+=data
-        with open(f"log{self.peer.pub_key}.txt", "a") as log:
-            log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
-            log.write(f" receive from {self.get_peer(node_id).pub_key} {len(buffer)}\n")
+        # with open(f"log{self.peer.pub_key}.txt", "a") as log:
+        #     log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
+        #     log.write(f" receive from {self.get_peer(node_id).pub_key} {len(buffer)}\n")
         # print(seqrand,"read",len(buffer), "from",self.get_peer(node_id).pub_key)
         loop = asyncio.get_event_loop()
         asyncio.run_coroutine_threadsafe(self._caller(buffer,node_id,addr), loop)
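
Across these hunks, listen_for_data implements simple length-prefixed framing: read a 32-byte big-endian length header, then drain the payload in chunks of at most 9048 bytes, treating an empty read as a closed stream. A standalone sketch of the read side under those assumptions (outside the protocol class; names are illustrative):

import asyncio

CHUNK = 9048  # same chunk bound as the code above

async def read_frame(reader: asyncio.StreamReader) -> bytes | None:
    # 32-byte big-endian length header, as written by send_stream.
    header = await reader.readexactly(32)
    remaining = int.from_bytes(header, byteorder="big")
    buffer = bytearray()
    while remaining > 0:
        chunk = await reader.read(min(remaining, CHUNK))
        if len(chunk) == 0:   # peer closed mid-frame
            return None
        remaining -= len(chunk)
        buffer += chunk
    return bytes(buffer)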
@@ -307,18 +307,18 @@ async def send_stream(self, node_id, data, lvl = 0):
             return False
         try:
             async with self.locks[node_id]:
-                with open(f"log{self.peer.pub_key}.txt", "a") as log:
-                    log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
-                    log.write(f" sending to {self.get_peer(node_id).pub_key} {len(data)}\n")
+                # with open(f"log{self.peer.pub_key}.txt", "a") as log:
+                #     log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
+                #     log.write(f" sending to {self.get_peer(node_id).pub_key} {len(data)}\n")
 
                 self.connections[node_id].writer.write(len(data).to_bytes(32,byteorder="big"))
                 await self.connections[node_id].writer.drain()
                 self.connections[node_id].writer.write(data)
                 await self.connections[node_id].writer.drain()
         except ConnectionResetError:
-            with open(f"log{self.peer.pub_key}.txt", "a") as log:
-                log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
-                log.write(f" cannot send to {self.get_peer(node_id).pub_key} {len(data)}\n")
+            # with open(f"log{self.peer.pub_key}.txt", "a") as log:
+            #     log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
+            #     log.write(f" cannot send to {self.get_peer(node_id).pub_key} {len(data)}\n")
             await asyncio.sleep(3)
             p: Peer = self.get_peer(node_id)
             if p == None:
@@ -327,15 +327,15 @@ async def send_stream(self, node_id, data, lvl = 0):
             if ret == False:
                 return False
             return await self.send_stream(node_id,data, lvl=lvl+1)
-        with open(f"log{self.peer.pub_key}.txt", "a") as log:
-            log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
-            log.write(f" finished sending to {self.get_peer(node_id).pub_key} {len(data)}\n")
+        # with open(f"log{self.peer.pub_key}.txt", "a") as log:
+        #     log.write(datetime.now().strftime("%d/%m/%Y, %H:%M:%S"))
+        #     log.write(f" finished sending to {self.get_peer(node_id).pub_key} {len(data)}\n")
         # print("done srream")
         return True
     def set_stream_close_callback(self, callback):
         self.stream_close_callback = callback
     async def _caller(self,data,node_id,addr):
-        print("received data... ", len(data))
+        # print("received data... ", len(data))
         try:
             self.stream_callback(data,node_id,addr)
         except Exception:
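
send_stream mirrors that framing on the write side: write the 32-byte length header, drain, write the payload, drain; on ConnectionResetError it backs off for 3 seconds, re-establishes the connection, and retries itself with lvl+1 (the bound on lvl sits outside these hunks). A sketch of that write-and-retry shape, with the reopen step abstracted into a placeholder callback and an illustrative retry bound:

import asyncio
from typing import Awaitable, Callable

MAX_RETRIES = 3  # illustrative bound; the real check on lvl is outside this hunk

async def write_frame(writer: asyncio.StreamWriter,
                      data: bytes,
                      reopen: Callable[[], Awaitable[asyncio.StreamWriter | None]],
                      lvl: int = 0) -> bool:
    if lvl >= MAX_RETRIES:
        return False
    try:
        # Same framing as send_stream: 32-byte big-endian length, then payload.
        writer.write(len(data).to_bytes(32, byteorder="big"))
        await writer.drain()
        writer.write(data)
        await writer.drain()
        return True
    except ConnectionResetError:
        await asyncio.sleep(3)        # back off, as in the code above
        new_writer = await reopen()   # caller re-establishes the stream
        if new_writer is None:
            return False
        return await write_frame(new_writer, data, reopen, lvl=lvl + 1)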
