161 changes: 155 additions & 6 deletions nimp/utils/p4.py
@@ -23,10 +23,12 @@
''' Perforce utilities '''

import argparse
import json
import logging
import os
import os.path
import re
import tempfile

import nimp.sys.process
import nimp.system
@@ -228,6 +230,22 @@ def reconcile(self, cl_number, *files):

return ret

def reconcile_workspace(self, *paths_to_reconcile, cl_number=None, dry_run=False):
''' Reconciles the given workspace paths '''

p4_reconcile_args = ['-f', '-e', '-a', '-d']
if dry_run:
p4_reconcile_args.append('-n')
if cl_number:
p4_reconcile_args.extend(['-c', cl_number])
for path_to_reconcile in paths_to_reconcile:
if not path_to_reconcile.endswith('...'):
if os.path.isdir(path_to_reconcile) or path_to_reconcile.endswith(('/', '\\')):
path_to_reconcile = os.path.join(path_to_reconcile, '...')
p4_reconcile_args.append(path_to_reconcile)

return self._run_using_arg_file('reconcile', *p4_reconcile_args) is not None

def get_changelist_description(self, cl_number):
''' Returns description of given changelist '''
desc, = next(self._parse_command_output(["describe", cl_number], r"\.\.\. desc (.*)"))
@@ -248,6 +266,21 @@ def get_last_synced_changelist(self):

return cl_number

def get_file_workspace_current_revision(self, file):
''' Returns the file revision currently synced in the workspace '''
have_rev = next(self._parse_command_output(['have', file], r'\.\.\. haveRev (\d+)'), None)
return have_rev[0] if have_rev else None

def print_file_data(self, file, revision=None):
''' wrapper for p4 print command '''
revision = f"#{revision}" if revision is not None else ''
Contributor:
I think it'd be better to leave this up to the caller, as you can use the @file_rev pattern with print.
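A rough sketch of what that caller-side variant could look like (hypothetical depot paths; `p4_client` stands for an already configured instance of this wrapper):

```python
# Hypothetical: print_file_data receives an already-qualified file spec, so the
# caller picks whichever revision syntax p4 print accepts (#rev, @change, ...).
head_data = p4_client.print_file_data('//depot/project/file.txt')
rev_data = p4_client.print_file_data('//depot/project/file.txt#3')
cl_data = p4_client.print_file_data('//depot/project/file.txt@123456')
```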

data = None
output = self._run('print', file+revision, use_json_format=True, hide_output=False)
output = [json.loads(json_element) for json_element in output.splitlines() if json_element]
for output_chunk in output:
if 'data' in output_chunk:
data = output_chunk['data']
Contributor:
This is missing the case where print splits the file content into multiple {'data': ...} blocks for some reason.
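A minimal sketch of a helper that would also cover that case (hypothetical name; assumes the 'data' chunks arrive in file order):

```python
def join_print_chunks(parsed_output):
    # parsed_output: the list of dicts built from the `p4 print -Mj` output above.
    # Assumption: when p4 print splits the content, the 'data' chunks are emitted
    # in file order, so joining them rebuilds the full file content.
    chunks = [chunk['data'] for chunk in parsed_output if 'data' in chunk]
    return ''.join(chunks) if chunks else None
```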

return data

def get_or_create_changelist(self, description):
''' Creates or returns changelist number if it's not already created '''
pending_changelists = self.get_pending_changelists()
@@ -320,7 +353,7 @@ def revert_unchanged(self, cl_number):
output = self._run('revert', '-a', '-c', cl_number, '//...')
return output is not None

def submit(self, cl_number):
def _submit(self, cl_number):
''' Submits given changelist '''
logging.info("Submiting changelist %s...", cl_number)
command = self._get_p4_command('submit', '-f', 'revertunchanged', '-c', cl_number)
@@ -335,6 +368,73 @@

return True

def submit_default_changelist(self, description=None, revert_unchanged=False, dry_run=False):
''' Submits the default changelist '''
logging.info("Submitting default changelist...")
submit_args = []
if revert_unchanged:
submit_args.extend(['-f', 'revertunchanged'])
if description:
# The description has to fit on one line, even when using -x arg_file.
# There is also a Perforce limit on how long the description can be, arg_file or not (tested):
# p4 command failed: Identifiers too long. Must not be longer than 1024 bytes of UTF-8
# The error actually shows up around 130000-ish bytes of UTF-8; anything under that seems fine,
# whatever happened to this 1024 bytes limit...
# Couldn't find more info on this subject in the Perforce documentation.
description_limit = 120000
submit_args.extend(['-d', description[:description_limit]])
Contributor:
Since the -x arg expects a file with one arg per line, should we escape the line endings (\n, \r\n, etc.) from the description?

Contributor Author:
You're right. Removing \n solves the issue. I haven't found a way to escape them, though, while preserving the line breaks.

Contributor Author:
We're now using utils.p4.submit(description=p4_desc):

  • It should avoid the escaping hell by just updating the default changelist spec before submitting.
  • I'll test some commit messages from our monorepo though, to be sure nothing weird happens.
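For context, a minimal usage sketch of that call path (the description text is made up; `p4_client` is an already configured instance of this wrapper):

```python
# submit() writes the description into the changelist spec via `p4 change -i`,
# so the embedded newlines don't need any arg-file escaping.
p4_client.submit(description="Automated merge\n\nmain -> release\nline breaks are preserved")
```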

if dry_run:
logging.info(f'{self._get_p4_command("submit")} {submit_args}')
return True
return self._run_using_arg_file('submit', *submit_args) is not None


def _get_cl_spec(self, cl_number=None):
command = ['change', '-o']
if cl_number is not None:
command.append(str(cl_number))
# We need the cl_spec without -z tag, so it can later be fed to <p4 change -i>
return self._run(*command, hide_output=False, use_ztag=False)

def _set_cl_spec(self, cl_spec):
output = self._run('change', '-i', stdin=cl_spec)
pattern = r"Change (\d+) (created|updated)"
matches = re.findall(pattern, output, re.MULTILINE)
assert len(matches) == 1
return matches[0]

def _update_cl_spec_field(self, cl_spec, spec_field, field_content):
assert spec_field in ALL_SPEC_FIELDS
possible_following_fields = fr":{os.linesep}|".join(ALL_SPEC_FIELDS) + fr':{os.linesep}'
pattern = re.compile(rf'{spec_field}:{os.linesep}\t(.*){os.linesep}{os.linesep}({possible_following_fields})',
# using the DOTALL flag rather than MULTILINE, which would stop at the first encountered \r\n
re.DOTALL)
matches = re.findall(pattern, cl_spec)
if not matches:
# It's possible that there is no following field
pattern = re.compile(rf'{spec_field}:{os.linesep}\t(.*)({os.linesep}{os.linesep})', re.DOTALL)
Contributor:
This could be achieved by adding a $ to possible_following_fields, like ({possible_following_fields}|$).
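A rough sketch of the single-pattern variant suggested above (untested; 'Description' stands in for the spec_field argument, ALL_SPEC_FIELDS as defined at the bottom of this file, and the non-capturing group would need the method's unpacking adjusted):

```python
import os
import re

spec_field = 'Description'  # example; in _update_cl_spec_field this is the argument
possible_following_fields = f":{os.linesep}|".join(ALL_SPEC_FIELDS) + f":{os.linesep}"
# One pattern instead of two: the field body ends either at the next spec field
# or at the end of the spec ($). Non-greedy (.*?) stops at the first terminator.
pattern = re.compile(
    rf"{spec_field}:{os.linesep}\t(.*?){os.linesep}{os.linesep}(?:{possible_following_fields}|$)",
    re.DOTALL,
)
```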

matches = re.findall(pattern, cl_spec)
assert len(matches) == 1
initial_desc, following_field = matches[0]
return cl_spec.replace(initial_desc, field_content)
# Replace desc rather than re.sub to preserve utf8 chars like \x7A\x3A
# return re.sub(pattern, rf'{spec_field}:{os.linesep}\t{field_content}{os.linesep}{os.linesep}{following_field}', cl_spec)

def submit(self, cl_number=None, description=None):
''' Submits the default changelist if no cl_number is provided,
otherwise submits the given changelist.
The description can be set or updated. '''
assert any(a is not None for a in [cl_number, description])

if description is not None:
cl_spec = self._get_cl_spec(cl_number=cl_number)
tabbed_description = "\n\t".join(line for line in description.splitlines())
cl_spec = self._update_cl_spec_field(cl_spec, SpecField.description, tabbed_description)
cl_number, status = self._set_cl_spec(cl_spec)

return self._submit(cl_number)


def sync(self, *files, cl_number = None):
''' Update given files '''
command = ["sync"]
@@ -370,8 +470,12 @@ def _escape_filename(name):
.replace('#', '%23') \
.replace('*', '%2A')

def _get_p4_command(self, *args):
command = ['p4', '-z', 'tag']
def _get_p4_command(self, *args, use_ztag=True, use_json_format=False):
command = ['p4']
if use_ztag:
command += ['-z', 'tag']
if use_json_format:
command.append('-Mj')
Contributor:
It's recommended to always use -ztag with -Mj.
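A rough sketch of how that recommendation could be folded in (hypothetical standalone helper mirroring the start of _get_p4_command):

```python
def p4_global_flags(use_ztag=True, use_json_format=False):
    # Per the review note, -Mj is meant to be paired with tagged output, so force
    # -z tag whenever JSON marshaling is requested, even if use_ztag is False.
    flags = []
    if use_ztag or use_json_format:
        flags += ['-z', 'tag']
    if use_json_format:
        flags.append('-Mj')
    return flags

# p4_global_flags(use_ztag=False, use_json_format=True) -> ['-z', 'tag', '-Mj']
```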

if self._port is not None:
command += ['-p', self._port]
if self._user is not None:
@@ -384,11 +488,12 @@ def _get_p4_command(self, *args):
command += list(args)
return command

def _run(self, *args, stdin=None, hide_output=False):
command = self._get_p4_command(*args)
def _run(self, *args, stdin=None, hide_output=False, use_json_format=False, use_ztag=True):
command = self._get_p4_command(*args, use_json_format=use_json_format, use_ztag=use_ztag)

for _ in range(5):
result, output, error = nimp.sys.process.call(command, stdin=stdin, encoding='cp437', capture_output=True, hide_output=hide_output)
result, output, error = nimp.sys.process.call(
command, stdin=stdin, encoding='cp437', capture_output=True, hide_output=hide_output)

if 'Operation took too long ' in error:
continue
@@ -405,6 +510,19 @@

return output

def _run_using_arg_file(self, command, *command_args):
''' runs p4 <args...> -x arg_file_containing_command_args command '''
# perforce seems unable to handle a tempfile.TemporaryFile():
# p4 command failed: Perforce client error: open for read:
# <temp_file_path>: The process cannot access the file because it is being used by another process
# Use a temp dir instead so the temp file can be used by perforce...
# The dir and file are wiped anyway when exiting the context manager
with tempfile.TemporaryDirectory(prefix="p4_arg_file_") as tmp_dir:
arg_file_path = os.path.normpath(os.path.join(tmp_dir, 'p4_arg_file'))
with open(arg_file_path, 'w') as fp:
fp.write('\n'.join(command_args))
return self._run('-x', arg_file_path, command)

def _parse_command_output(self, command, *patterns, stdin = None, hide_output = False):
output = self._run(*command, stdin = stdin, hide_output = hide_output)

@@ -422,3 +540,34 @@ def _parse_command_output(self, command, *patterns, stdin = None, hide_output =

for elem in zip(*match_list):
yield elem


class SpecField():
change = 'Change'
date = 'Date'
client = 'Client'
user = 'User'
status = 'Status'
type = 'Type'
description = 'Description'
imported_by = 'ImportedBy'
identity = 'Identity'
jobs = 'Jobs'
stream = 'Stream'
files = 'Files'


ALL_SPEC_FIELDS = [
SpecField.change,
SpecField.date,
SpecField.client,
SpecField.user,
SpecField.status,
SpecField.type,
SpecField.description,
SpecField.imported_by,
SpecField.identity,
SpecField.jobs,
SpecField.stream,
SpecField.files
]