@@ -14,16 +14,18 @@ import fi.vm.sade.properties.OphProperties
import org.apache.commons.io.IOUtils
import scalaz.concurrent.Task
import support.TypedActorRef
- import scala.concurrent.duration._
- import java.util.UUID
+ import java.time.LocalDate
+ import scala.concurrent.duration._
+ import java.util.{Date, UUID}
import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference}
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

case class YtlSyncHaku(hakuOid: String, tunniste: String)

case class YtlSyncAllHaut(tunniste: String)
+ case class YtlSyncAllHautNightly(tunniste: String)
case class YtlSyncSingle(personOid: String, tunniste: String)
case class ActiveKkHakuOids(hakuOids: Set[String])
case class YtlFetchActorRef(actor: ActorRef) extends TypedActorRef
@@ -40,15 +42,37 @@ class YtlFetchActor(

  val activeKKHakuOids = new AtomicReference[Set[String]](Set.empty)

+  // val lastSyncStart = new AtomicReference[Option[LocalDate]](None)
+  val lastSyncStart = new AtomicReference[Long](0)
+  val minIntervalBetween = 1000 * 60 * 60 * 22 // At least 22 hours between nightly syncs
+
  implicit val ec: ExecutionContext = ExecutorUtil.createExecutor(
    config.integrations.asyncOperationThreadPoolSize,
    getClass.getSimpleName
  )

  def setAktiivisetKKHaut(hakuOids: Set[String]): Unit = activeKKHakuOids.set(hakuOids)
  override def receive: Receive = {
+    case ah: YtlSyncAllHautNightly =>
+      val tunniste = ah.tunniste
+      val now = System.currentTimeMillis()
+      val lss = lastSyncStart.get()
+      val timeToStartNewSync = (lss + minIntervalBetween) < now
+      if (timeToStartNewSync) {
+        log.info(s"Starting nightly sync for all hakus. Previous run was $lss")
+        lastSyncStart.set(now)
+        val resultF = syncAllOneHakuAtATime(tunniste)
+        resultF.onComplete {
+          case Success(_) =>
+            log.info(s"($tunniste) Nightly sync for all hakus success!")
+          case Failure(t) =>
+            log.error(t, s"($tunniste) Nightly sync for all hakus failed...")
+        }
+      } else {
+        log.warning(s"Not starting nightly sync for all hakus as the previous run was at $lss")
+      }
    case ah: YtlSyncAllHaut =>
-      val tunniste = "manual_sync_for_all_hakus_" + System.currentTimeMillis()
+      val tunniste = ah.tunniste
      val resultF = syncAllOneHakuAtATime(tunniste)
      resultF.onComplete {
        case Success(_) =>
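The core of the new YtlSyncAllHautNightly handler is the throttle around lastSyncStart: a nightly sync starts only if at least minIntervalBetween milliseconds have passed since the previously recorded start. The standalone sketch below isolates that check; the NightlySyncGuard object and the tryStartSync name are illustrative only, not part of the change.

import java.util.concurrent.atomic.AtomicReference

// Minimal sketch of the nightly-sync throttle, mirroring the fields in the diff.
object NightlySyncGuard {
  private val lastSyncStart = new AtomicReference[Long](0L)
  private val minIntervalBetween: Long = 1000L * 60 * 60 * 22 // 22 hours in milliseconds

  // Returns true and records the new start time when enough time has passed
  // since the previous start; otherwise leaves the recorded time untouched.
  def tryStartSync(now: Long = System.currentTimeMillis()): Boolean = {
    val previousStart = lastSyncStart.get()
    val timeToStartNewSync = previousStart + minIntervalBetween < now
    if (timeToStartNewSync) lastSyncStart.set(now)
    timeToStartNewSync
  }
}

The get-then-set pair is not a single atomic compare-and-set, so two callers racing on such an object could in principle both pass the check; inside the actor this is harmless because the mailbox processes one message at a time.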
@@ -58,7 +82,7 @@ class YtlFetchActor(
      }
      sender ! tunniste
    case s: YtlSyncHaku =>
-      val tunniste = System.currentTimeMillis() + "_manual_sync_for_haku_" + s.hakuOid
+      val tunniste = s.tunniste
      val resultF = fetchAndHandleHakemuksetForSingleHakuF(hakuOid = s.hakuOid, s.tunniste)
      resultF.onComplete {
        case Success(_) =>
@@ -69,7 +93,7 @@ class YtlFetchActor(
      log.info(s"Ytl-sync käynnistetty haulle ${s.hakuOid} tunnisteella $tunniste")
      resultF pipeTo sender
    case s: YtlSyncSingle =>
-      val tunniste = System.currentTimeMillis() + "_manual_sync_for_person_" + s.personOid
+      val tunniste = s.tunniste
      val resultF = syncSingle(s.personOid)
      resultF.onComplete {
        case Success(_) =>
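The diff does not show what sends YtlSyncAllHautNightly; the callers now supply the tunniste themselves, so presumably a recurring timer builds one per tick. Purely as a hedged illustration of that wiring (the helper name and the schedule times below are assumptions, and it presumes an Akka 2.6-style scheduleWithFixedDelay), a periodic trigger could look roughly like this, with the actor's 22-hour guard keeping early or duplicate ticks from starting extra syncs:

import akka.actor.{ActorRef, ActorSystem, Cancellable}
import scala.concurrent.duration._

// Hypothetical wiring: fire a nightly tick roughly every 24 hours and let the
// actor's own minIntervalBetween check decide whether a sync actually starts.
def scheduleNightlySync(system: ActorSystem, ytlFetchActor: ActorRef): Cancellable = {
  import system.dispatcher // implicit ExecutionContext required by the scheduler
  system.scheduler.scheduleWithFixedDelay(1.hour, 24.hours)(new Runnable {
    // Builds a fresh tunniste per tick, matching how the other callers now pass their own identifier.
    def run(): Unit =
      ytlFetchActor ! YtlSyncAllHautNightly(tunniste = s"nightly_sync_${System.currentTimeMillis()}")
  })
}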