Mentions légales du service
Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
P
python-sharelatex
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Model registry
Operate
Environments
Monitor
Incidents
Service Desk
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
sed-rennes
sharelatex
python-sharelatex
Commits
39dbe9ff
Commit
39dbe9ff
authored
3 weeks ago
by
ANDRADE-BARROSO Guillermo
Browse files
Options
Downloads
Patches
Plain Diff
fix get_doc
fix process update_data
parent
aa3310e7
No related branches found
No related tags found
1 merge request
!49
update implementation to be compatible with overleaf version 5.2.1
Pipeline
#1225832
failed
3 weeks ago
Stage: precheck
Stage: test
Changes
2
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
sharelatex/__init__.py
+9
-46
9 additions, 46 deletions
sharelatex/__init__.py
sharelatex/cli.py
+20
-6
20 additions, 6 deletions
sharelatex/cli.py
with
29 additions
and
52 deletions
sharelatex/__init__.py
+
9
−
46
View file @
39dbe9ff
...
...
@@ -905,50 +905,21 @@ class SyncClient:
# use thread local storage to pass the project data
storage
=
threading
.
local
()
storage
.
is_data
=
False
class
Namespace
(
BaseNamespace
):
"""
Namespace.
"""
def
on_connect
(
self
)
->
None
:
"""
on_connect.
"""
logger
.
debug
(
"
[Connected] Yeah !!
"
)
def
on_reconnect
(
self
)
->
None
:
"""
on_reconnect.
"""
logger
.
debug
(
"
[Reconnected] re-Yeah !!
"
)
def
on_disconnect
(
self
)
->
None
:
"""
on_disconnect.
"""
logger
.
debug
(
"
[Disconnected] snif!
"
)
def
on_connection_rejected
(
*
args
:
Any
)
->
None
:
"""
on_connection_rejected.
"""
logger
.
debug
(
"
[connectionRejected] oh !!!
"
)
headers
=
{
"
Referer
"
:
url
}
headers
.
update
(
self
.
headers
)
with
SocketIO
(
self
.
base_url
,
params
=
{
"
projectId
"
:
project_id
},
verify
=
self
.
verify
,
Namespace
=
Namespace
,
Namespace
=
Logging
Namespace
,
cookies
=
self
.
cookie
,
headers
=
headers
,
)
as
socketIO
:
def
on_joint_doc
(
*
args
:
Any
)
->
None
:
"""
on_joint_doc.
"""
logger
.
debug
(
"
[socketIO] join doc ok
"
)
# transform list of str (lines) as bytes for finally decode as
# utf-8 list of str
storage
.
doc_data
=
[
...
...
@@ -960,24 +931,15 @@ class SyncClient:
"""
on_joint_project.
"""
storage
.
project_data
=
args
[
1
]
storage
.
project_data
=
args
[
0
]
socketIO
.
emit
(
"
joinDoc
"
,
doc_id
,
{
"
encodeRanges
"
:
True
},
on_joint_doc
)
def
on_connection_accepted
(
*
args
:
Any
)
->
None
:
"""
on_connection_accepted.
"""
logger
.
debug
(
"
[connectionAccepted] Waoh !!!
"
)
socketIO
.
emit
(
"
joinProject
"
,
{
"
project_id
"
:
project_id
},
on_joint_project
)
socketIO
.
on
(
"
connectionAccepted
"
,
on_connection_accepted
)
socketIO
.
on
(
"
connectionRejected
"
,
on_connection_rejected
)
socketIO
.
on
(
"
joinProjectResponse
"
,
on_joint_project
)
while
not
storage
.
is_data
:
logger
.
debug
(
"
[socketIO] wait for
doc
data
"
)
logger
.
debug
(
"
[socketIO] wait for
project
data
"
)
socketIO
.
wait
(
0.1
)
logger
.
debug
(
"
[socketIO] wait for doc data finish !
"
)
logger
.
debug
(
"
[socketIO] wait for project data finish !
"
)
# NOTE(msimonin): Check return type
if
dest_path
is
None
:
return
"
\n
"
.
join
(
storage
.
doc_data
)
...
...
@@ -1106,6 +1068,7 @@ class SyncClient:
"
qqfilename
"
:
path_as_path
.
name
,
"
qqtotalfilesize
"
:
os
.
path
.
getsize
(
path_as_path
),
}
breakpoint
()
r
=
self
.
_post
(
url
,
params
=
params
,
files
=
files
,
verify
=
self
.
verify
)
r
.
raise_for_status
()
response
=
r
.
json
()
...
...
This diff is collapsed.
Click to expand it.
sharelatex/cli.py
+
20
−
6
View file @
39dbe9ff
...
...
@@ -666,7 +666,6 @@ def _sync_remote_docs(
update_data
:
UpdateDatum
,
datetimes_dict
:
Mapping
[
str
,
datetime
.
datetime
],
)
->
None
:
breakpoint
()
remote_docs
=
(
item
for
item
in
remote_items
if
item
[
"
type
"
]
==
"
doc
"
)
logger
.
debug
(
"
check if remote documents are newer than locals
"
)
remote_time
=
datetime
.
datetime
.
now
(
datetime
.
timezone
.
utc
)
...
...
@@ -687,11 +686,26 @@ def _sync_remote_docs(
local_time
=
datetime
.
datetime
.
fromtimestamp
(
local_path
.
stat
().
st_mtime
,
datetime
.
timezone
.
utc
)
updates
=
[
update
[
"
meta
"
][
"
end_ts
"
]
for
update
in
update_data
[
"
updates
"
]
if
doc_id
in
update
[
"
docs
"
]
]
if
update_data
[
"
updates
"
]:
# check if have a new updates data structure
if
"
pathnames
"
in
update_data
[
"
updates
"
][
0
]:
updates
=
[]
for
update
in
update_data
[
"
updates
"
]:
if
relative_path
in
update
[
"
pathnames
"
]:
updates
.
append
(
update
[
"
meta
"
][
"
end_ts
"
])
else
:
for
op
in
update
[
"
project_ops
"
]:
for
v
in
op
.
values
():
if
type
(
v
)
is
dict
:
if
"
pathname
"
in
v
:
if
relative_path
==
v
[
"
pathname
"
]:
updates
.
append
(
update
[
"
meta
"
][
"
end_ts
"
])
else
:
updates
=
[
update
[
"
meta
"
][
"
end_ts
"
]
for
update
in
update_data
[
"
updates
"
]
if
doc_id
in
update
[
"
docs
"
]
]
if
len
(
updates
)
>
0
:
remote_time
=
datetime
.
datetime
.
fromtimestamp
(
updates
[
0
]
/
1000
,
datetime
.
timezone
.
utc
...
...
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment