from __future__ import annotations

import configparser
import os
import threading
import re

from modules import shared, errors, cache, scripts
from modules.gitpython_hack import Repo
from modules.paths_internal import extensions_dir, extensions_builtin_dir, script_path  # noqa: F401

os.makedirs(extensions_dir, exist_ok=True)


def active():
    if shared.cmd_opts.disable_all_extensions or shared.opts.disable_all_extensions == "all":
        return []
    elif shared.cmd_opts.disable_extra_extensions or shared.opts.disable_all_extensions == "extra":
        return [x for x in extensions if x.enabled and x.is_builtin]
    else:
        return [x for x in extensions if x.enabled]


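# For reference, a minimal illustrative metadata.ini that ExtensionMetadata below would understand.
# Only the pieces this module actually reads are shown ([Extension] with Name and Requires); the
# extension names are made up for the example, and real files may contain additional sections.
#
#   [Extension]
#   Name = my-extension
#   Requires = some-other-extension, yet-another-extension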
class ExtensionMetadata:
    filename = "metadata.ini"
    config: configparser.ConfigParser
    canonical_name: str
    requires: list

    def __init__(self, path, canonical_name):
        self.config = configparser.ConfigParser()

        filepath = os.path.join(path, self.filename)
        # `self.config.read()` will quietly swallow OSErrors (which FileNotFoundError is),
        # so no need to check whether the file exists beforehand.
        try:
            self.config.read(filepath)
        except Exception:
            errors.report(f"Error reading {self.filename} for extension {canonical_name}.", exc_info=True)

        self.canonical_name = self.config.get("Extension", "Name", fallback=canonical_name)
        self.canonical_name = self.canonical_name.lower().strip()

        self.requires = self.get_script_requirements("Requires", "Extension")

    def get_script_requirements(self, field, section, extra_section=None):
        """reads a list of requirements from the config; field is the name of the field in the ini file,
        like Requires or Before, and section is the name of the [section] in the ini file; additionally,
        reads more requirements from [extra_section] if specified."""

        x = self.config.get(section, field, fallback='')

        if extra_section:
            x = x + ', ' + self.config.get(extra_section, field, fallback='')

        return self.parse_list(x.lower())

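    # Illustrative example with made-up extension names: given `Requires = ext-a ext-b` under
    # [Extension], get_script_requirements("Requires", "Extension") returns ["ext-a", "ext-b"];
    # if extra_section is given, that section's field is appended before parsing.
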
    def parse_list(self, text):
        """converts a line from config ("ext1 ext2, ext3") into a python list (["ext1", "ext2", "ext3"])"""

        if not text:
            return []

        # both "," and " " are accepted as separator
        return [x for x in re.split(r"[,\s]+", text.strip()) if x]
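
    # Note: since the split pattern is r"[,\s]+", newlines also act as separators, so a multi-line
    # ini value (configparser folds indented continuation lines into the value) should parse the
    # same as a single comma-separated line.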


class Extension:
    lock = threading.Lock()
    cached_fields = ['remote', 'commit_date', 'branch', 'commit_hash', 'version']
    metadata: ExtensionMetadata

    def __init__(self, name, path, enabled=True, is_builtin=False, metadata=None):
        self.name = name
        self.path = path
        self.enabled = enabled
        self.status = ''
        self.can_update = False
        self.is_builtin = is_builtin
        self.commit_hash = ''
        self.commit_date = None
        self.version = ''
        self.branch = None
        self.remote = None
        self.have_info_from_repo = False

        self.metadata = metadata if metadata else ExtensionMetadata(self.path, name.lower())
        self.canonical_name = self.metadata.canonical_name

    def to_dict(self):
        return {x: getattr(self, x) for x in self.cached_fields}

    def from_dict(self, d):
        for field in self.cached_fields:
            setattr(self, field, d[field])

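    # to_dict()/from_dict() round-trip `cached_fields` so that repository info gathered by
    # do_read_info_from_repo() can be stored in the 'extensions-git' cache and restored on later
    # startups without invoking git; exactly when modules.cache decides to re-run the callback is
    # an implementation detail of cache.cached_data_for_file, not something this class controls.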
    def read_info_from_repo(self):
        if self.is_builtin or self.have_info_from_repo:
            return

        def read_from_repo():
            with self.lock:
                if self.have_info_from_repo:
                    return

                self.do_read_info_from_repo()

                return self.to_dict()

        try:
            d = cache.cached_data_for_file('extensions-git', self.name, os.path.join(self.path, ".git"), read_from_repo)
            self.from_dict(d)
        except FileNotFoundError:
            pass

        self.status = 'unknown' if self.status == '' else self.status

    def do_read_info_from_repo(self):
        repo = None
        try:
            if os.path.exists(os.path.join(self.path, ".git")):
                repo = Repo(self.path)
        except Exception:
            errors.report(f"Error reading github repository info from {self.path}", exc_info=True)

        if repo is None or repo.bare:
            self.remote = None
        else:
            try:
                self.remote = next(repo.remote().urls, None)
                commit = repo.head.commit
                self.commit_date = commit.committed_date
                if repo.active_branch:
                    self.branch = repo.active_branch.name
                self.commit_hash = commit.hexsha
                self.version = self.commit_hash[:8]

            except Exception:
                errors.report(f"Failed reading extension data from Git repository ({self.name})", exc_info=True)
                self.remote = None

        self.have_info_from_repo = True

    def list_files(self, subdir, extension):
        dirpath = os.path.join(self.path, subdir)
        if not os.path.isdir(dirpath):
            return []

        res = []
        for filename in sorted(os.listdir(dirpath)):
            res.append(scripts.ScriptFile(self.path, filename, os.path.join(dirpath, filename)))

        res = [x for x in res if os.path.splitext(x.path)[1].lower() == extension and os.path.isfile(x.path)]

        return res

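    # Illustrative call (actual call sites live elsewhere in the codebase): list_files("scripts", ".py")
    # would collect ScriptFile entries for every *.py file directly inside the extension's scripts/
    # directory; note that `extension` here is the file suffix including the dot, not an Extension object.
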
    def check_updates(self):
        repo = Repo(self.path)
        for fetch in repo.remote().fetch(dry_run=True):
            if fetch.flags != fetch.HEAD_UPTODATE:
                self.can_update = True
                self.status = "new commits"
                return

        try:
            origin = repo.rev_parse('origin')
            if repo.head.commit != origin:
                self.can_update = True
                self.status = "behind HEAD"
                return
        except Exception:
            self.can_update = False
            self.status = "unknown (remote error)"
            return

        self.can_update = False
        self.status = "latest"

    def fetch_and_reset_hard(self, commit='origin'):
        repo = Repo(self.path)
        # Fix: `error: Your local changes to the following files would be overwritten by merge`,
        # which happens because WSL2 Docker sets 755 file permissions instead of 644.
        repo.git.fetch(all=True)
        repo.git.reset(commit, hard=True)
        self.have_info_from_repo = False


def list_extensions():
    extensions.clear()

    if shared.cmd_opts.disable_all_extensions:
        print("*** \"--disable-all-extensions\" arg was used, will not load any extensions ***")
    elif shared.opts.disable_all_extensions == "all":
        print("*** \"Disable all extensions\" option was set, will not load any extensions ***")
    elif shared.cmd_opts.disable_extra_extensions:
        print("*** \"--disable-extra-extensions\" arg was used, will only load built-in extensions ***")
    elif shared.opts.disable_all_extensions == "extra":
        print("*** \"Disable all extensions\" option was set, will only load built-in extensions ***")

    loaded_extensions = {}

    # scan through extensions directory and load metadata
    for dirname in [extensions_builtin_dir, extensions_dir]:
        if not os.path.isdir(dirname):
            continue

        for extension_dirname in sorted(os.listdir(dirname)):
            path = os.path.join(dirname, extension_dirname)
            if not os.path.isdir(path):
                continue

            canonical_name = extension_dirname
            metadata = ExtensionMetadata(path, canonical_name)

            # check for duplicated canonical names
            already_loaded_extension = loaded_extensions.get(metadata.canonical_name)
            if already_loaded_extension is not None:
                errors.report(f'Duplicate canonical name "{canonical_name}" found in extensions "{extension_dirname}" and "{already_loaded_extension.name}". Former will be discarded.', exc_info=False)
                continue

            is_builtin = dirname == extensions_builtin_dir
            extension = Extension(name=extension_dirname, path=path, enabled=extension_dirname not in shared.opts.disabled_extensions, is_builtin=is_builtin, metadata=metadata)
            extensions.append(extension)
            loaded_extensions[metadata.canonical_name] = extension

    # check for requirements
    for extension in extensions:
        for req in extension.metadata.requires:
            required_extension = loaded_extensions.get(req)
            if required_extension is None:
                errors.report(f'Extension "{extension.name}" requires "{req}" which is not installed.', exc_info=False)
                continue

            if not required_extension.enabled:
                errors.report(f'Extension "{extension.name}" requires "{required_extension.name}" which is disabled.', exc_info=False)
                continue


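# Module-level registry of installed extensions: list_extensions() clears and repopulates it, and
# active() is the intended way to read it with the "disable extensions" settings applied.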
extensions: list[Extension] = []