version 1.28, 2003/02/03 05:39:37
|
version 1.85, 2016/06/19 04:28:08
|
Line 6
|
Line 6
|
# |
# |
# Copyright Michigan State University Board of Trustees |
# Copyright Michigan State University Board of Trustees |
# |
# |
# This file is part of the LearningOnline Network with a |
# This file is part of the LearningOnline Network with CAPA (LON-CAPA). |
# Computer assisted personalized approach (loncapa). |
|
# |
# |
# Loncapa is free software; you can redistribute it and/or modify |
# LON-CAPA is free software; you can redistribute it and/or modify |
# it under the terms of the GNU General Public License as published by |
# it under the terms of the GNU General Public License as published by |
# the Free Software Foundation; either version 2 of the License, or |
# the Free Software Foundation; either version 2 of the License, or |
# (at your option) any later version. |
# (at your option) any later version. |
# |
# |
# Loncapa is distributed in the hope that it will be useful, |
# LON-CAPA is distributed in the hope that it will be useful, |
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
# GNU General Public License for more details. |
# GNU General Public License for more details. |
# |
# |
# You should have received a copy of the GNU General Public License |
# You should have received a copy of the GNU General Public License |
# along with loncapa; if not, write to the Free Software |
# along with LON-CAPA; if not, write to the Free Software |
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
# |
# |
# /home/httpd/html/adm/gpl.txt |
# /home/httpd/html/adm/gpl.txt |
# |
# |
# http://www.loncapa.org/ |
# http://www.lon-capa.org/ |
# |
|
# YEAR=2001 |
|
# 04/14/2001, 04/16/2001 Scott Harrison |
|
# |
|
# YEAR=2002 |
|
# 05/11/2002 Scott Harrison |
|
# |
|
# YEAR=2003 |
|
# Scott Harrison |
|
# |
# |
### |
### |
|
|
Line 53 filesystem installation location: F</etc
|
Line 43 filesystem installation location: F</etc
|
Here is the cron job entry. |
Here is the cron job entry. |
|
|
C<# Repopulate and refresh the metadata database used for the search catalog.> |
C<# Repopulate and refresh the metadata database used for the search catalog.> |
|
|
C<10 1 * * 7 www /home/httpd/perl/searchcat.pl> |
C<10 1 * * 7 www /home/httpd/perl/searchcat.pl> |
|
|
This script only allows itself to be run as the user C<www>. |
This script only allows itself to be run as the user C<www>. |
Line 66 The metadata is entered into a SQL datab
|
Line 55 The metadata is entered into a SQL datab
|
This script also does general database maintenance such as reformatting |
This script also does general database maintenance such as reformatting |
the C<loncapa:metadata> table if it is deprecated. |
the C<loncapa:metadata> table if it is deprecated. |
|
|
This script also builds dynamic temporal metadata and stores this inside |
This script evaluates dynamic metadata from the authors' |
a F<nohist_resevaldata.db> database file. |
F<nohist_resevaldata.db> database file in order to store it in MySQL. |
|
|
This script is playing an increasingly important role for a loncapa |
This script is playing an increasingly important role for a loncapa |
library server. The proper operation of this script is critical for a smooth |
library server. The proper operation of this script is critical for a smooth |
Line 75 and correct user experience.
|
Line 64 and correct user experience.
|
|
|
=cut |
=cut |
|
|
# ========================================================== Setting things up. |
use strict; |
|
use DBI; |
# ------------------------------------------------------ Use external modules. |
|
|
|
use lib '/home/httpd/lib/perl/'; |
use lib '/home/httpd/lib/perl/'; |
use LONCAPA::Configuration; |
use LONCAPA::lonmetadata; |
|
use LONCAPA; |
|
use Getopt::Long; |
use IO::File; |
use IO::File; |
use HTML::TokeParser; |
use HTML::TokeParser; |
use DBI; |
|
use GDBM_File; |
use GDBM_File; |
use POSIX qw(strftime mktime); |
use POSIX qw(strftime mktime); |
|
use Mail::Send; |
|
use Apache::loncommon(); |
|
|
# ----------------- Code to enable 'find' subroutine listing of the .meta files |
use Apache::lonnet(); |
use File::Find; |
|
|
|
# List of .meta files (used on a per-user basis). |
use File::Find; |
my @metalist; |
|
|
|
# --------------- Read loncapa_apache.conf and loncapa.conf and get variables. |
# |
my $perlvarref = LONCAPA::Configuration::read_conf('loncapa.conf'); |
# Set up configuration options |
my %perlvar = %{$perlvarref}; |
my ($simulate,$oneuser,$help,$verbose,$logfile,$debug); |
undef($perlvarref); # Remove since sensitive and not needed. |
GetOptions ( |
delete($perlvar{'lonReceipt'}); # Remove since sensitive and not needed. |
'help' => \$help, |
|
'simulate' => \$simulate, |
# ------------------------------------- Only run if machine is a library server |
'only=s' => \$oneuser, |
if ($perlvar{'lonRole'} ne 'library') |
'verbose=s' => \$verbose, |
{ |
'debug' => \$debug, |
exit(0); |
); |
} |
|
|
if ($help) { |
# ------------------------------ Make sure this process is running as user=www. |
print <<"ENDHELP"; |
my $wwwid = getpwnam('www'); |
$0 |
if ($wwwid != $<) |
Rebuild and update the LON-CAPA metadata database. |
{ |
Options: |
$emailto = "$perlvar{'lonAdmEMail'},$perlvar{'lonSysEMail'}"; |
-help Print this help |
$subj = "LON: $perlvar{'lonHostID'} User ID mismatch"; |
-simulate Do not modify the database. |
system("echo 'User ID mismatch. searchcat.pl must be run as user www.' | ". |
-only=user Only compute for the given user. Implies -simulate |
"mailto $emailto -s '$subj' > /dev/null"); |
-verbose=val Sets logging level, val must be a number |
exit(1); |
-debug Turns on debugging output |
} |
ENDHELP |
|
exit 0; |
# ------------------------------------------------------ Initialize log output. |
} |
open(LOG,'>'.$perlvar{'lonDaemons'}.'/logs/searchcat.log'); |
|
print(LOG '==== Searchcat Run '.localtime().' ===='."\n\n"); |
if (! defined($debug)) { |
|
$debug = 0; |
my $dbh; # Database object reference handle. |
} |
|
|
# ----------------------------- Verify connection to loncapa:metadata database. |
if (! defined($verbose)) { |
unless ( |
$verbose = 0; |
$dbh = DBI->connect('DBI:mysql:loncapa','www', |
} |
$perlvar{'lonSqlAccess'}, |
|
{ RaiseError => 0,PrintError => 0}) |
if (defined($oneuser)) { |
) |
$simulate=1; |
{ |
} |
print(LOG '**** ERROR **** Cannot connect to database!'."\n"); |
|
exit(0); |
## |
} |
## Use variables for table names so we can test this routine a little easier |
|
my %oldnames = ( |
# ------------------------------ Create loncapa:metadata table if non-existent. |
'metadata' => 'metadata', |
my $make_metadata_table = 'CREATE TABLE IF NOT EXISTS metadata ('. |
'portfolio' => 'portfolio_metadata', |
'title TEXT, author TEXT, subject TEXT, url TEXT, keywords TEXT, '. |
'access' => 'portfolio_access', |
'version TEXT, notes TEXT, abstract TEXT, mime TEXT, language TEXT, '. |
'addedfields' => 'portfolio_addedfields', |
'creationdate DATETIME, lastrevisiondate DATETIME, owner TEXT, '. |
'allusers' => 'allusers', |
'copyright TEXT, utilitysemaphore BOOL, FULLTEXT idx_title (title), '. |
); |
'FULLTEXT idx_author (author), FULLTEXT idx_subject (subject), '. |
|
'FULLTEXT idx_url (url), FULLTEXT idx_keywords (keywords), '. |
my %newnames; |
'FULLTEXT idx_version (version), FULLTEXT idx_notes (notes), '. |
# new table names - append pid to have unique temporary tables |
'FULLTEXT idx_abstract (abstract), FULLTEXT idx_mime (mime), '. |
foreach my $key (keys(%oldnames)) { |
'FULLTEXT idx_language (language), FULLTEXT idx_owner (owner), '. |
$newnames{$key} = 'new'.$oldnames{$key}.$$; |
'FULLTEXT idx_copyright (copyright)) TYPE=MYISAM'; |
} |
|
|
$dbh->do($make_metadata_table); # Generate the table. |
# |
|
# Only run if machine is a library server |
# ----------------------------- Verify format of the loncapa:metadata database. |
exit if ($Apache::lonnet::perlvar{'lonRole'} ne 'library'); |
# (delete and recreate database if necessary). |
my $hostid = $Apache::lonnet::perlvar{'lonHostID'}; |
|
|
# Make a positive control for verifying table structure. |
# |
my $make_metadata_table_CONTROL = $make_metadata_table; |
# Make sure this process is running from user=www |
$make_metadata_table_CONTROL =~ |
my $wwwid=getpwnam('www'); |
s/^(CREATE TABLE IF NOT EXISTS) metadata/$1 CONTROL_metadata/; |
if ($wwwid!=$<) { |
|
my $emailto="$Apache::lonnet::perlvar{'lonAdmEMail'},$Apache::lonnet::perlvar{'lonSysEMail'}"; |
$dbh->do('DROP TABLE IF EXISTS CONTROL_metadata'); |
my $subj="LON: $Apache::lonnet::perlvar{'lonHostID'} User ID mismatch"; |
$dbh->do($make_metadata_table_CONTROL); |
system("echo 'User ID mismatch. searchcat.pl must be run as user www.' |\ |
|
mail -s '$subj' $emailto > /dev/null"); |
my $table_description; # selectall reference to the table description. |
exit 1; |
|
} |
my $CONTROL_table_string; # What the table description should look like. |
# |
my $table_string; # What the table description does look like. |
# Let people know we are running |
|
open(LOG,'>>'.$Apache::lonnet::perlvar{'lonDaemons'}.'/logs/searchcat.log'); |
# Calculate the CONTROL table description (what it should be). |
&log(0,'==== Searchcat Run '.localtime()."===="); |
$table_description = $dbh->selectall_arrayref('describe CONTROL_metadata'); |
|
foreach my $table_row (@{$table_description}) |
|
{ |
if ($debug) { |
$CONTROL_table_string .= join(',',@{$table_row})."\n"; |
&log(0,'simulating') if ($simulate); |
} |
&log(0,'only processing user '.$oneuser) if ($oneuser); |
|
&log(0,'verbosity level = '.$verbose); |
# Calculate the current table description (what it currently looks like). |
} |
$table_description = $dbh->selectall_arrayref('describe metadata'); |
# |
foreach my $table_row (@{$table_description}) |
# Connect to database |
{ |
my $dbh; |
$table_string .= join(',',@{$table_row})."\n"; |
if (! ($dbh = DBI->connect("DBI:mysql:loncapa","www",$Apache::lonnet::perlvar{'lonSqlAccess'}, |
} |
{ RaiseError =>0,PrintError=>0}))) { |
|
&log(0,"Cannot connect to database!"); |
if ($table_string ne $CONTROL_table_string) |
die "MySQL Error: Cannot connect to database!\n"; |
{ |
} |
# Log this incident. |
# This can return an error and still be okay, so we do not bother checking. |
print(LOG '**** WARNING **** Table structure mismatch, need to regenerate'. |
# (perhaps it should be more robust and check for specific errors) |
'.'."\n"); |
foreach my $key (keys(%newnames)) { |
# Delete the table. |
if ($newnames{$key} ne '') { |
$dbh->do('DROP TABLE IF EXISTS metadata'); |
$dbh->do('DROP TABLE IF EXISTS '.$newnames{$key}); |
# Generate the table. |
} |
$dbh->do($make_metadata_table); |
} |
} |
|
|
# |
$dbh->do('DROP TABLE IF EXISTS CONTROL_metadata'); # Okay. Done with control. |
# Create the new metadata, portfolio and allusers tables |
|
foreach my $key (keys(%newnames)) { |
# ----------------------------------------------- Set utilitysemaphore to zero. |
if ($newnames{$key} ne '') { |
$dbh->do('UPDATE metadata SET utilitysemaphore = 0'); |
my $request = |
|
&LONCAPA::lonmetadata::create_metadata_storage($newnames{$key},$oldnames{$key}); |
# ========================================================= Main functionality. |
$dbh->do($request); |
|
if ($dbh->err) { |
# - Determine home authors on this server based on resources dir and user tree. |
$dbh->disconnect(); |
|
&log(0,"MySQL Error Create: ".$dbh->errstr); |
# RESOURCES: the resources directory (subdirs correspond to author usernames). |
die $dbh->errstr; |
opendir(RESOURCES,"$perlvar{'lonDocRoot'}/res/$perlvar{'lonDefDomain'}") or |
} |
(print(LOG '=== /res/--lonDefDomain-- directory is not accessible'."\n") |
} |
and exit(0)); |
} |
|
|
# query_home_server_status will look for user home directories on this machine. |
# |
my @homeusers = |
# find out which users we need to examine |
grep {&query_home_server_status($perlvar{'lonDocRoot'}.'/res/'. |
my @domains = sort(&Apache::lonnet::current_machine_domains()); |
$perlvar{'lonDefDomain'}.'/'.$_) |
&log(9,'domains ="'.join('","',@domains).'"'); |
} grep {!/^\.\.?$/} readdir(RESOURCES); |
|
closedir(RESOURCES); |
foreach my $dom (@domains) { |
|
&log(9,'domain = '.$dom); |
unless (@homeusers) |
opendir(RESOURCES,"$Apache::lonnet::perlvar{'lonDocRoot'}/res/$dom"); |
{ |
my @homeusers = |
print(LOG '=== No home users found on this server.'."\n"); |
grep { |
} |
&ishome("$Apache::lonnet::perlvar{'lonDocRoot'}/res/$dom/$_"); |
|
} grep { |
# Consider each author individually. |
!/^\.\.?$/; |
foreach my $user (@homeusers) |
} readdir(RESOURCES); |
{ |
closedir RESOURCES; |
# Make a log entry. |
&log(5,'users = '.$dom.':'.join(',',@homeusers)); |
print(LOG "\n".'=== User: '.$user."\n\n"); |
# |
|
my %courses; |
# Get filesystem path to this user's directory. |
if ($oneuser) { |
my $user_directory = |
%courses = &courseiddump($dom,'.',1,'.','.',$oneuser,undef, |
&construct_path_to_user_directory($perlvar{'lonDefDomain'},$user); |
undef,'.'); |
|
@homeusers=($oneuser); |
# Remove left-over db-files from a potentially crashed searchcat run. |
} else { |
unlink($user_directory.'/nohist_new_resevaldata.db'); |
# get courseIDs for domain on current machine |
|
%courses=&Apache::lonnet::courseiddump($dom,'.',1,'.','.','.',1,[$hostid],'.'); |
# Cleanup the metalist array. |
} |
undef(@metalist); |
|
@metalist = (); |
# |
|
# Loop through the users |
# This will add entries to the @metalist array. |
foreach my $user (@homeusers) { |
&File::Find::find(\&wanted, |
next if (exists($courses{$dom.'_'.$user})); |
$perlvar{'lonDocRoot'}.'/res/'. |
&log(0,"=== User: ".$user); |
$perlvar{'lonDefDomain'}.'/'.$user); |
&process_dynamic_metadata($user,$dom); |
|
# |
# -- process file to get metadata and put into search catalog SQL database |
# Use File::Find to get the files we need to read/modify |
# Also, build and store dynamic metadata. |
find( |
# Also, delete record entries before refreshing. |
{preprocess => \&only_meta_files, |
foreach my $m (@metalist) |
#wanted => \&print_filename, |
{ |
#wanted => \&log_metadata, |
# Log this action. |
wanted => \&process_meta_file, |
print(LOG "- ".$m."\n"); |
no_chdir => 1, |
|
}, join('/',($Apache::lonnet::perlvar{'lonDocRoot'},'res',$dom,$user)) ); |
# Get metadata from the file. |
} |
my $ref = get_metadata_from_file($m); |
# Search for all users and public portfolio files |
|
my (%allusers,%portusers); |
# Make a datarecord identifier for this resource. |
if ($oneuser) { |
my $m2 = '/res/'.declutter($m); |
%portusers = ( |
$m2 =~ s/\.meta$//; |
$oneuser => '', |
|
); |
# Build and store dynamic metadata inside nohist_resevaldata.db. |
%allusers = ( |
build_on_the_fly_dynamic_metadata($m2); |
$oneuser => '', |
|
); |
# Delete record if it already exists. |
} else { |
my $q2 = 'select * from metadata where url like binary '."'".$m2."'"; |
my $dir = $Apache::lonnet::perlvar{lonUsersDir}.'/'.$dom; |
my $sth = $dbh->prepare($q2); |
&descend_tree($dom,$dir,0,\%portusers,\%allusers); |
$sth->execute(); |
} |
my $r1 = $sth->fetchall_arrayref; |
foreach my $uname (keys(%portusers)) { |
if (@$r1) |
my $urlstart = '/uploaded/'.$dom.'/'.$uname; |
{ |
my $pathstart = &propath($dom,$uname).'/userfiles'; |
$sth = |
my $is_course = ''; |
$dbh->prepare('delete from metadata where url like binary '. |
if (exists($courses{$dom.'_'.$uname})) { |
"'".$m2."'"); |
$is_course = 1; |
$sth->execute(); |
} |
} |
my $curr_perm = &Apache::lonnet::get_portfile_permissions($dom,$uname); |
|
my %access = &Apache::lonnet::get_access_controls($curr_perm); |
# Add new/replacement record into the loncapa:metadata table. |
foreach my $file (keys(%access)) { |
$sth = $dbh->prepare('insert into metadata values ('. |
my ($group,$url,$fullpath); |
'"'.delete($ref->{'title'}).'"'.','. |
if ($is_course) { |
'"'.delete($ref->{'author'}).'"'.','. |
($group, my ($path)) = ($file =~ /^(\w+)(\/.+)$/); |
'"'.delete($ref->{'subject'}).'"'.','. |
$fullpath = $pathstart.'/groups/'.$group.'/portfolio'.$path; |
'"'.$m2.'"'.','. |
$url = $urlstart.'/groups/'.$group.'/portfolio'.$path; |
'"'.delete($ref->{'keywords'}).'"'.','. |
} else { |
'"'.'current'.'"'.','. |
$fullpath = $pathstart.'/portfolio'.$file; |
'"'.delete($ref->{'notes'}).'"'.','. |
$url = $urlstart.'/portfolio'.$file; |
'"'.delete($ref->{'abstract'}).'"'.','. |
} |
'"'.delete($ref->{'mime'}).'"'.','. |
if (ref($access{$file}) eq 'HASH') { |
'"'.delete($ref->{'language'}).'"'.','. |
my %portaccesslog = |
'"'.sql_formatted_time( |
&LONCAPA::lonmetadata::process_portfolio_access_data($dbh, |
delete($ref->{'creationdate'})).'"'.','. |
$simulate,\%newnames,$url,$fullpath,$access{$file}); |
'"'.sql_formatted_time( |
&portfolio_logging(%portaccesslog); |
delete($ref->{'lastrevisiondate'})).'"'.','. |
} |
'"'.delete($ref->{'owner'}).'"'.','. |
my %portmetalog = &LONCAPA::lonmetadata::process_portfolio_metadata($dbh,$simulate,\%newnames,$url,$fullpath,$is_course,$dom,$uname,$group); |
'"'.delete($ref->{'copyright'}).'"'.','. |
&portfolio_logging(%portmetalog); |
'1'.')'); |
} |
$sth->execute(); |
} |
} |
my %duplicates; |
|
my %names_by_id = ( |
# ----------------------- Clean up database, remove stale SQL database records. |
id => {}, |
$dbh->do('DELETE FROM metadata WHERE utilitysemaphore = 0'); |
clickers => {}, |
|
); |
# -------------------------------------------------- Copy over the new db-files |
my %ids_by_name = ( |
system('mv '.$user_directory.'/nohist_new_resevaldata.db '. |
id => {}, |
$user_directory.'/nohist_resevaldata.db'); |
clickers => {}, |
} |
); |
|
my %idstodelete = ( |
# --------------------------------------------------- Close database connection |
id => {}, |
$dbh->disconnect; |
clickers => {}, |
print LOG "\n==== Searchcat completed ".localtime()." ====\n"; |
); |
|
my %idstoadd = ( |
|
id => {}, |
|
clickers => {}, |
|
); |
|
my %namespace = ( |
|
id => 'ids', |
|
clickers => 'clickers', |
|
); |
|
my %idtext = ( |
|
id => 'employee/student IDs', |
|
clickers => 'clicker IDs', |
|
); |
|
unless ($simulate || $oneuser) { |
|
foreach my $key ('id','clickers') { |
|
my $hashref = &tie_domain_hash($dom,$namespace{$key},&GDBM_WRCREAT()); |
|
if (ref($hashref) eq 'HASH') { |
|
while (my ($id,$unamestr) = each(%{$hashref}) ) { |
|
$id = &unescape($id); |
|
$unamestr = &unescape($unamestr); |
|
if ($key eq 'clickers') { |
|
my @unames = split(/,/,$unamestr); |
|
foreach my $uname (@unames) { |
|
push(@{$ids_by_name{$key}{$uname}},$id); |
|
} |
|
$names_by_id{$key}{$id} = $unamestr; |
|
} else { |
|
$names_by_id{$key}{$id} = $unamestr; |
|
push(@{$ids_by_name{$key}{$unamestr}},$id); |
|
} |
|
} |
|
&untie_domain_hash($hashref); |
|
} |
|
} |
|
} |
|
# Update allusers |
|
foreach my $uname (keys(%allusers)) { |
|
next if (exists($courses{$dom.'_'.$uname})); |
|
my %userdata = |
|
&Apache::lonnet::get('environment',['firstname','lastname', |
|
'middlename','generation','id','permanentemail','clickers'], |
|
$dom,$uname); |
|
unless ($simulate || $oneuser) { |
|
foreach my $key ('id','clickers') { |
|
my %addid = (); |
|
if ($userdata{$key} ne '') { |
|
my $idfromenv = $userdata{$key}; |
|
if ($key eq 'id') { |
|
$idfromenv=~tr/A-Z/a-z/; |
|
$addid{$idfromenv} = 1; |
|
} else { |
|
$idfromenv =~ s/^\s+//; |
|
$idfromenv =~ s/\s+$//; |
|
map { $addid{$_} = 1; } split(/,/,$idfromenv); |
|
} |
|
} |
|
if (ref($ids_by_name{$key}{$uname}) eq 'ARRAY') { |
|
if (scalar(@{$ids_by_name{$key}{$uname}}) > 1) { |
|
&log(0,"Multiple $idtext{$key} found in $namespace{$key}.db for $uname:$dom -- ". |
|
join(', ',@{$ids_by_name{$key}{$uname}})); |
|
} |
|
foreach my $id (@{$ids_by_name{$key}{$uname}}) { |
|
if ($addid{$id}) { |
|
delete($addid{$id}); |
|
} else { |
|
if ($key eq 'id') { |
|
$idstodelete{$key}{$id} = $uname; |
|
} else { |
|
$idstodelete{$key}{$id} .= $uname.','; |
|
} |
|
} |
|
} |
|
} |
|
if (keys(%addid)) { |
|
foreach my $id (keys(%addid)) { |
|
if ($key eq 'id') { |
|
if (exists($idstoadd{$key}{$id})) { |
|
push(@{$duplicates{$id}},$uname); |
|
} else { |
|
$idstoadd{$key}{$id} = $uname; |
|
} |
|
} else { |
|
$idstoadd{$key}{$id} .= $uname.','; |
|
} |
|
} |
|
} |
|
} |
|
} |
|
|
|
$userdata{'username'} = $uname; |
|
$userdata{'domain'} = $dom; |
|
my %alluserslog = |
|
&LONCAPA::lonmetadata::process_allusers_data($dbh,$simulate, |
|
\%newnames,$uname,$dom,\%userdata); |
|
foreach my $item (keys(%alluserslog)) { |
|
&log(0,$alluserslog{$item}); |
|
} |
|
} |
|
unless ($simulate || $oneuser) { |
|
foreach my $key ('id','clickers') { |
|
if (keys(%{$idstodelete{$key}}) > 0) { |
|
my %resulthash; |
|
if ($key eq 'id') { |
|
%resulthash = &Apache::lonnet::iddel($dom,$idstodelete{$key},$hostid,$namespace{$key}); |
|
} else { |
|
foreach my $delid (sort(keys(%{$idstodelete{$key}}))) { |
|
$idstodelete{$key}{$delid} =~ s/,$//; |
|
} |
|
%resulthash = &Apache::lonnet::iddel($dom,$idstodelete{$key},$hostid,$namespace{$key}); |
|
} |
|
if ($resulthash{$hostid} eq 'ok') { |
|
foreach my $id (sort(keys(%{$idstodelete{$key}}))) { |
|
&log(0,"Record deleted from $namespace{$key}.db for $dom -- $id => ".$idstodelete{$key}{$id}); |
|
} |
|
} else { |
|
&log(0,"Error: '$resulthash{$hostid}' occurred when attempting to delete records from $namespace{$key}.db for $dom"); |
|
} |
|
} |
|
if (keys(%{$idstoadd{$key}}) > 0) { |
|
my $idmessage = ''; |
|
my %newids; |
|
if ($key eq 'id') { |
|
foreach my $addid (sort(keys(%{$idstoadd{$key}}))) { |
|
if ((exists($names_by_id{$key}{$addid})) && ($names_by_id{$key}{$addid} ne $idstoadd{$key}{$addid}) && !($idstodelete{$key}{$addid})) { |
|
&log(0,"Two usernames associated with a single ID $addid in domain: $dom: $names_by_id{$key}{$addid} (current) and $idstoadd{$key}{$addid}\n"); |
|
$idmessage .= "$addid,$names_by_id{$key}{$addid},$idstoadd{$key}{$addid}\n"; |
|
} else { |
|
$newids{$addid} = $idstoadd{$key}{$addid}; |
|
} |
|
} |
|
} else { |
|
foreach my $addid (sort(keys(%{$idstoadd{$key}}))) { |
|
$idstoadd{$key}{$addid} =~ s/,$//; |
|
$newids{$addid} = $idstoadd{$key}{$addid}; |
|
} |
|
} |
|
if (keys(%newids) > 0) { |
|
my $putresult; |
|
if ($key eq 'clickers') { |
|
$putresult = &Apache::lonnet::updateclickers($dom,'add',\%newids,$hostid); |
|
} else { |
|
$putresult = &Apache::lonnet::put_dom($namespace{$key},\%newids,$dom,$hostid); |
|
} |
|
if ($putresult eq 'ok') { |
|
foreach my $id (sort(keys(%newids))) { |
|
&log(0,"Record added to $namespace{$key}.db for $dom -- $id => ".$newids{$id}); |
|
} |
|
} else { |
|
&log(0,"Error: '$putresult' occurred when attempting to add records to $namespace{$key}.db for $dom"); |
|
} |
|
} |
|
if ($idmessage) { |
|
my $to = &Apache::loncommon::build_recipient_list(undef,'idconflictsmail',$dom); |
|
if ($to ne '') { |
|
my $msg = new Mail::Send; |
|
$msg->to($to); |
|
$msg->subject('LON-CAPA studentIDs conflict'); |
|
my $lonhost = $Apache::lonnet::perlvar{'lonHostID'}; |
|
my $hostname = &Apache::lonnet::hostname($lonhost); |
|
my $replytoaddress = 'do-not-reply@'.$hostname; |
|
$msg->add('Reply-to',$replytoaddress); |
|
$msg->add('From','www@'.$hostname); |
|
$msg->add('Content-type','text/plain; charset=UTF-8'); |
|
if (my $fh = $msg->open()) { |
|
print $fh |
|
'The following IDs are used for more than one user in your domain:'."\n". |
|
'Each row contains: Student/Employee ID, Current username in ids.db file, '. |
|
'Additional username'."\n\n". |
|
$idmessage; |
|
$fh->close; |
|
} |
|
} |
|
} |
|
} |
|
} |
|
if (keys(%duplicates) > 0) { |
|
foreach my $id (sort(keys(%duplicates))) { |
|
if (ref($duplicates{$id}) eq 'ARRAY') { |
|
&log(0,"Duplicate IDs found for entries to add to ids.db in $dom -- $id => ".join(',',@{$duplicates{$id}})); |
|
} |
|
} |
|
} |
|
} |
|
} |
|
|
|
# |
|
# Rename the tables |
|
if (! $simulate) { |
|
foreach my $key (keys(%oldnames)) { |
|
if (($oldnames{$key} ne '') && ($newnames{$key} ne '')) { |
|
$dbh->do('DROP TABLE IF EXISTS '.$oldnames{$key}); |
|
if (! $dbh->do('RENAME TABLE '.$newnames{$key}.' TO '.$oldnames{$key})) { |
|
&log(0,"MySQL Error Rename: ".$dbh->errstr); |
|
die $dbh->errstr; |
|
} else { |
|
&log(1,"MySQL table rename successful for $key."); |
|
} |
|
} |
|
} |
|
} |
|
if (! $dbh->disconnect) { |
|
&log(0,"MySQL Error Disconnect: ".$dbh->errstr); |
|
die $dbh->errstr; |
|
} |
|
## |
|
## Finished! |
|
&log(0,"==== Searchcat completed ".localtime()." ===="); |
close(LOG); |
close(LOG); |
exit(0); |
|
|
|
# ================================================================ Subroutines. |
|
|
|
=pod |
|
|
|
=head1 SUBROUTINES |
|
|
|
=cut |
|
|
|
=pod |
|
|
|
B<unescape> - translate to unstrange escaped syntax to strange characters. |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$str> - string with unweird characters. |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<string> - string with potentially weird characters. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
sub unescape ($) |
|
{ |
|
my $str = shift(@_); |
|
$str =~ s/%([a-fA-F0-9][a-fA-F0-9])/pack("C",hex($1))/eg; |
|
return($str); |
|
} |
|
|
|
=pod |
|
|
|
B<escape> - translate strange characters to unstrange escaped syntax. |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$str> - string with potentially weird characters to unweird-ify. |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<string> - unweird-ified string. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
sub escape ($) |
|
{ |
|
my $str = shift(@_); |
|
$str =~ s/(\W)/"%".unpack('H2',$1)/eg; |
|
return($str); |
|
} |
|
|
|
=pod |
|
|
|
B<build_on_the_fly_dynamic_metadata> - evaluate and store dynamic metadata. |
|
|
|
Dynamic metadata is stored in a nohist_resevaldata GDBM database. |
|
Most of the calculations in this subroutine are totally pointless |
|
and not useful for anything that this subroutine does. |
|
(THIS IS A FRUSTRATED SUBROUTINE THAT IS NON-OPTIMAL, *&*&!.) |
|
The only thing that this subroutine really makes happen is adjusting |
|
a 'count' value inside the F<nohist_new_resevaldata.db> as well |
|
as updating F<nohist_new_resevaldata.db> with information from |
|
F<nohist_resevaldata.db>. |
|
|
|
=over 4 |
&write_type_count(); |
|
&write_copyright_count(); |
|
|
Parameters: |
exit 0; |
|
|
=item I<$url> - the filesystem path (url may be a misnomer...) |
## |
|
## Status logging routine. Inputs: $level, $message |
=back |
## |
|
## $level 0 should be used for normal output and error messages |
=over 4 |
## |
|
## $message does not need to end with \n. In the case of errors |
Returns: |
## the message should contain as much information as possible to |
|
## help in diagnosing the problem. |
=item C<hash> - key-value table of dynamically evaluated metadata. |
## |
|
sub log { |
=back |
my ($level,$message)=@_; |
|
$level = 0 if (! defined($level)); |
=cut |
if ($verbose >= $level) { |
|
print LOG $message.$/; |
sub build_on_the_fly_dynamic_metadata ($) |
} |
{ |
} |
# BEWARE ALL WHO TRY TO UNDERSTAND THIS ABSURDLY HORRIBLE SUBROUTINE. |
|
|
sub portfolio_logging { |
# Do all sorts of mumbo-jumbo to compute the user's directory. |
my (%portlog) = @_; |
my $url = &declutter(shift(@_)); |
foreach my $key (keys(%portlog)) { |
$url =~ s/\.meta$//; |
if (ref($portlog{$key}) eq 'HASH') { |
my %returnhash = (); |
foreach my $item (keys(%{$portlog{$key}})) { |
my ($adomain,$aauthor) = ($url =~ m!^(\w+)/(\w+)/!); |
&log(0,$portlog{$key}{$item}); |
my $user_directory = &construct_path_to_user_directory($adomain,$aauthor); |
} |
|
} |
# Attempt a GDBM database instantiation inside users directory and proceed. |
} |
if ((tie(%evaldata,'GDBM_File', |
} |
$user_directory. |
|
'/nohist_resevaldata.db',&GDBM_READER(),0640)) && |
sub descend_tree { |
(tie(%newevaldata,'GDBM_File', |
my ($dom,$dir,$depth,$allportusers,$alldomusers) = @_; |
$user_directory. |
if (-d $dir) { |
'/nohist_new_resevaldata.db',&GDBM_WRCREAT(),0640))) |
opendir(DIR,$dir); |
{ |
my @contents = grep(!/^\./,readdir(DIR)); |
# For different variables, track the running sum and counts. |
closedir(DIR); |
my %sum = (); |
$depth ++; |
my %cnt = (); |
foreach my $item (@contents) { |
|
if (($depth < 4) && (length($item) == 1)) { |
# Define computed items as a sum (add) or an average (avg) or a raw |
&descend_tree($dom,$dir.'/'.$item,$depth,$allportusers,$alldomusers); |
# count (cnt) or 'app'? |
} else { |
my %listitems=('count' => 'add', |
if (-e $dir.'/'.$item.'/file_permissions.db') { |
'course' => 'add', |
$$allportusers{$item} = ''; |
'avetries' => 'avg', |
} |
'stdno' => 'add', |
if (-e $dir.'/'.$item.'/passwd') { |
'difficulty' => 'avg', |
$$alldomusers{$item} = ''; |
'clear' => 'avg', |
} |
'technical' => 'avg', |
} |
'helpful' => 'avg', |
} |
'correct' => 'avg', |
} |
'depth' => 'avg', |
} |
'comments' => 'app', |
|
'usage' => 'cnt' |
######################################################## |
); |
######################################################## |
|
### ### |
# Untaint the url and use as part of a regular expression. |
### File::Find support routines ### |
my $regexp = $url; |
### ### |
$regexp =~ s/(\W)/\\$1/g; |
######################################################## |
$regexp = '___'.$regexp.'___([a-z]+)$'; |
######################################################## |
|
## |
# Check existing nohist database for this url. |
## &only_meta_files |
# THE ONLY TIME THIS IS IMPORTANT FOR THIS AWFUL SUBROUTINE |
## |
# IS FOR 'count' ENTRIES |
## Called by File::Find. |
# AND FOR REFRESHING non-'count' ENTRIES INSIDE nohist_new DATABASE. |
## Takes a list of files/directories in and returns a list of files/directories |
foreach (keys %evaldata) |
## to search. |
{ |
sub only_meta_files { |
my $key = &unescape($_); |
my @PossibleFiles = @_; |
if ($key =~ /$regexp/) # If url-based entry exists. |
my @ChosenFiles; |
{ |
foreach my $file (@PossibleFiles) { |
my $ctype = $1; # Set to specific category type. |
if ( ($file =~ /\.meta$/ && # Ends in meta |
|
$file !~ /\.\d+\.[^\.]+\.meta$/ # is not for a prior version |
# Do an increment for this category type. |
) || (-d $File::Find::dir."/".$file )) { # directories are okay |
if (defined($cnt{$ctype})) |
# but we do not want /. or /.. |
{ |
push(@ChosenFiles,$file); |
$cnt{$ctype}++; |
} |
} |
} |
else |
return @ChosenFiles; |
{ |
} |
$cnt{$ctype} = 1; |
|
} |
## |
unless ($listitems{$ctype} eq 'app') # WHAT DOES 'app' MEAN? |
## |
{ |
## Debugging routines, use these for 'wanted' in the File::Find call |
# Increment the sum based on the evaluated data in the db. |
## |
if (defined($sum{$ctype})) |
sub print_filename { |
{ |
my ($file) = $_; |
$sum{$ctype} += $evaldata{$_}; |
my $fullfilename = $File::Find::name; |
} |
if ($debug) { |
else |
if (-d $file) { |
{ |
&log(5," Got directory ".$fullfilename); |
$sum{$ctype} = $evaldata{$_}; |
} else { |
} |
&log(5," Got file ".$fullfilename); |
} |
} |
else # 'app' mode, means to use '<hr />' as a separator |
} |
{ |
$_=$file; |
if (defined($sum{$ctype})) |
} |
{ |
|
if ($evaldata{$_}) |
sub log_metadata { |
{ |
my ($file) = $_; |
$sum{$ctype} .= '<hr />'.$evaldata{$_}; |
my $fullfilename = $File::Find::name; |
} |
return if (-d $fullfilename); # No need to do anything here for directories |
} |
if ($debug) { |
else |
&log(6,$fullfilename); |
{ |
my $ref = &metadata($fullfilename); |
$sum{$ctype} = ''.$evaldata{$_}; |
if (! defined($ref)) { |
} |
&log(6," No data"); |
} |
return; |
if ($ctype ne 'count') |
} |
{ |
while (my($key,$value) = each(%$ref)) { |
# ALERT! THIS HORRIBLE LOOP IS ACTUALLY DOING SOMETHING |
&log(6," ".$key." => ".$value); |
# USEFUL! |
} |
$newevaldata{$_} = $evaldata{$_}; |
&count_copyright($ref->{'copyright'}); |
} |
} |
} |
$_=$file; |
} |
} |
|
|
# THE ONLY OTHER TIME THIS LOOP IS USEFUL IS FOR THE 'count' HASH |
## |
# ELEMENT. |
## process_meta_file |
foreach (keys %cnt) |
## Called by File::Find. |
{ |
## Only input is the filename in $_. |
if ($listitems{$_} eq 'avg') |
sub process_meta_file { |
{ |
my ($file) = $_; |
$returnhash{$_} = int(($sum{$_}/$cnt{$_})*100.0+0.5)/100.0; |
my $filename = $File::Find::name; # full filename |
} |
return if (-d $filename); # No need to do anything here for directories |
elsif ($listitems{$_} eq 'cnt') |
# |
{ |
&log(3,$filename) if ($debug); |
$returnhash{$_} = $cnt{$_}; |
# |
} |
my $ref = &metadata($filename); |
else |
# |
{ |
# $url is the original file url, not the metadata file |
$returnhash{$_} = $sum{$_}; |
my $target = $filename; |
} |
$target =~ s/\.meta$//; |
} |
my $url='/res/'.&declutter($target); |
|
&log(3," ".$url) if ($debug); |
# A RARE MOMENT OF DOING ANYTHING USEFUL INSIDE THIS |
# |
# BLEEPING SUBROUTINE. |
# Ignore some files based on their metadata |
if ($returnhash{'count'}) |
if ($ref->{'obsolete'}) { |
{ |
&log(3,"obsolete") if ($debug); |
my $newkey = $$.'_'.time.'_searchcat___'.&escape($url).'___count'; |
return; |
$newevaldata{$newkey} = $returnhash{'count'}; |
} |
} |
&count_copyright($ref->{'copyright'}); |
|
if ($ref->{'copyright'} eq 'private') { |
untie(%evaldata); # Close/release the original nohist database. |
&log(3,"private") if ($debug); |
untie(%newevaldata); # Close/release the new nohist database. |
return; |
} |
} |
return(%returnhash); |
# |
# Celebrate! We have now accomplished some simple calculations using |
# Find the dynamic metadata |
# 1000% bloated functionality in our subroutine. Go wash your eyeballs |
my %dyn; |
# out now. |
if ($url=~ m:/default$:) { |
} |
$url=~ s:/default$:/:; |
|
&log(3,"Skipping dynamic data") if ($debug); |
=pod |
} else { |
|
&log(3,"Retrieving dynamic data") if ($debug); |
B<wanted> - used by B<File::Find::find> subroutine. |
%dyn=&get_dynamic_metadata($url); |
|
&count_type($url); |
This evaluates whether a file is wanted, and pushes it onto the |
} |
I<@metalist> array. This subroutine was, for the most part, auto-generated |
&LONCAPA::lonmetadata::getfiledates($ref,$target); |
by the B<find2perl> command. |
# |
|
my %Data = ( |
=over 4 |
%$ref, |
|
%dyn, |
Parameters: |
'url'=>$url, |
|
'version'=>'current'); |
=item I<$file> - a path to the file. |
if (! $simulate) { |
|
my ($count,$err) = |
=back |
&LONCAPA::lonmetadata::store_metadata($dbh,$newnames{'metadata'}, |
|
'metadata',\%Data); |
=over 4 |
if ($err) { |
|
&log(0,"MySQL Error Insert: ".$err); |
Returns: |
} |
|
if ($count < 1) { |
=item C<boolean> - true or false based on logical statement. |
&log(0,"Unable to insert record into MySQL database for $url"); |
|
} |
=back |
} |
|
# |
=cut |
# Reset $_ before leaving |
|
$_ = $file; |
sub wanted ($) |
} |
{ |
|
(($dev,$ino,$mode,$nlink,$uid,$gid) = lstat($_)) && |
######################################################## |
-f $_ && |
######################################################## |
/^.*\.meta$/ && !/^.+\.\d+\.[^\.]+\.meta$/ && |
### ### |
push(@metalist,$File::Find::dir.'/'.$_); |
### &metadata($uri) ### |
} |
### Retrieve metadata for the given file ### |
|
### ### |
=pod |
######################################################## |
|
######################################################## |
B<get_metadata_from_file> - read xml-tagged file and return parsed metadata. |
sub metadata { |
|
my ($uri) = @_; |
I<Note that this is significantly altered from a subroutine present in lonnet.> |
my %metacache=(); |
|
$uri=&declutter($uri); |
=over 4 |
my $filename=$uri; |
|
$uri=~s/\.meta$//; |
Parameters: |
$uri=''; |
|
if ($filename !~ /\.meta$/) { |
=item I<$file> - a path to the file.
$filename.='.meta'; |
|
} |
=back |
my $metastring = |
|
&LONCAPA::lonmetadata::getfile($Apache::lonnet::perlvar{'lonDocRoot'}.'/res/'.$filename); |
=over 4 |
return undef if (! defined($metastring)); |
|
my $parser=HTML::TokeParser->new(\$metastring); |
Returns: |
|
|
|
=item C<hash reference> - a hash array (keys and values). |
|
|
|
=back |
|
|
|
=cut |
|
|
|
sub get_metadata_from_file ($) |
|
{ |
|
my ($filename) = @_; |
|
my %metatable; # Used to store return value of hash-tabled metadata. |
|
$filename = &declutter($filename); # Remove non-identifying filesystem info |
|
my $uri = ''; # The URI is not relevant in this scenario. |
|
unless ($filename =~ m/\.meta$/) # Unless ending with .meta. |
|
{ |
|
$filename .= '.meta'; # Append a .meta suffix. |
|
} |
|
# Get the file contents. |
|
my $metadata_string = |
|
&get_file_contents($perlvar{'lonDocRoot'}.'/res/'.$filename); |
|
|
|
# Parse the file based on its XML tags. |
|
my $parser = HTML::TokeParser->new(\$metadata_string); |
|
my $token; |
my $token; |
while ($token = $parser->get_token) # Loop through tokens. |
while ($token=$parser->get_token) { |
{ |
if ($token->[0] eq 'S') { |
if ($token->[0] eq 'S') # If it is a start token. |
my $entry=$token->[1]; |
{ |
my $unikey=$entry; |
my $entry = $token->[1]; |
if (defined($token->[2]->{'part'})) { |
my $unikey = $entry; # A unique identifier for this xml tag key. |
$unikey.='_'.$token->[2]->{'part'}; |
if (defined($token->[2]->{'part'})) |
} |
{ |
if (defined($token->[2]->{'name'})) { |
$unikey .= '_'.$token->[2]->{'part'}; |
$unikey.='_'.$token->[2]->{'name'}; |
} |
} |
if (defined($token->[2]->{'name'})) |
if ($metacache{$uri.'keys'}) { |
{ |
$metacache{$uri.'keys'}.=','.$unikey; |
$unikey .= '_'.$token->[2]->{'name'}; |
} else { |
} |
$metacache{$uri.'keys'}=$unikey; |
# Append $unikey to metatable's keys entry. |
} |
if ($metatable{$uri.'keys'}) |
foreach ( @{$token->[3]}) { |
{ |
$metacache{$uri.''.$unikey.'.'.$_}=$token->[2]->{$_}; |
$metatable{$uri.'keys'} .= ','.$unikey; |
} |
} |
if (! ($metacache{$uri.''.$unikey}=$parser->get_text('/'.$entry))){ |
else |
$metacache{$uri.''.$unikey} = |
{ |
$metacache{$uri.''.$unikey.'.default'}; |
$metatable{$uri.'keys'} = $unikey; |
} |
} |
} # End of ($token->[0] eq 'S') |
# Insert contents into metatable entry for the unikey. |
} |
foreach my $t3 (@{$token->[3]}) |
return \%metacache; |
{ |
} |
$metatable{$uri.''.$unikey.'.'.$_} = $token->[2]->{$t3}; |
|
} |
######################################################## |
# If there was no text contained inside the tags, set = default. |
######################################################## |
unless |
### ### |
( |
### Dynamic Metadata ### |
$metatable{$uri.''.$unikey} = $parser->get_text('/'.$entry) |
### ### |
) |
######################################################## |
{ |
######################################################## |
$metatable{$uri.''.$unikey} = |
## |
$metatable{$uri.''.$unikey.'.default'}; |
## Dynamic metadata description (incomplete) |
} |
## |
} |
## For a full description of all fields, |
} |
## see LONCAPA::lonmetadata |
# Return with a key-value table of XML tags and their tag contents. |
## |
return(\%metatable); |
## Field Type |
} |
##----------------------------------------------------------- |
|
## count integer |
=pod |
## course integer |
|
## course_list comma separated list of course ids |
B<get_file_contents> - returns either the contents of the file or a -1. |
## avetries real |
|
## avetries_list comma separated list of real numbers |
=over 4 |
## stdno real |
|
## stdno_list comma separated list of real numbers |
Parameters: |
## usage integer |
|
## usage_list comma separated list of resources |
=item I<$file> - a complete filesystem path to the file.
## goto scalar |
|
## goto_list comma separated list of resources |
=back |
## comefrom scalar |
|
## comefrom_list comma separated list of resources |
=over 4 |
## difficulty real |
|
## difficulty_list comma separated list of real numbers |
Returns: |
## sequsage scalar |
|
## sequsage_list comma separated list of resources |
=item C<string> - file contents or a -1. |
## clear real |
|
## technical real |
=back |
## correct real |
|
## helpful real |
|
## depth real |
|
## comments html of all the comments made |
|
## |
|
{ |
|
|
|
my %DynamicData; |
|
my %Counts; |
|
|
|
##
## &process_dynamic_metadata($user,$dom)
## Reads a user's dynamic resource-evaluation data and access counts
## from the nohist GDBM databases into the file-scoped %DynamicData and
## %Counts hashes.  Returns 1 on success, 0 if either database could
## not be tied (note: a failure on the second tie leaves %DynamicData
## populated from the first).
sub process_dynamic_metadata {
    my ($user,$dom) = @_;
    undef(%DynamicData);
    undef(%Counts);
    #
    my $prodir = &propath($dom,$user);
    #
    # Read in the dynamic metadata
    my %evaldata;
    if (! tie(%evaldata,'GDBM_File',
              $prodir.'/nohist_resevaldata.db',&GDBM_READER(),0640)) {
        return 0;
    }
    #
    %DynamicData = &LONCAPA::lonmetadata::process_reseval_data(\%evaldata);
    untie(%evaldata);
    $DynamicData{'domain'} = $dom;
    #
    # Read in the access count data
    &log(7,'Reading access count data') if ($debug);
    my %countdata;
    if (! tie(%countdata,'GDBM_File',
              $prodir.'/nohist_accesscount.db',&GDBM_READER(),0640)) {
        return 0;
    }
    while (my ($key,$count) = each(%countdata)) {
        # \Q...\E quotes regex metacharacters in the domain (domains
        # routinely contain '.'), so this is a literal prefix match;
        # the previous bare /^$dom/ treated the domain as a pattern.
        next if ($key !~ /^\Q$dom\E/);
        $key = &unescape($key);
        &log(8,'    Count '.$key.' = '.$count) if ($debug);
        $Counts{$key}=$count;
    }
    untie(%countdata);
    if ($debug) {
        &log(7,scalar(keys(%Counts)).
             " Counts read for ".$user."@".$dom);
        &log(7,scalar(keys(%DynamicData)).
             " Dynamic metadata read for ".$user."@".$dom);
    }
    #
    return 1;
}
|
|
|
##
## &get_dynamic_metadata($url)
## Returns the dynamic metadata hash for a single resource, drawn from
## the file-scoped %DynamicData and %Counts previously filled in by
## &process_dynamic_metadata().
sub get_dynamic_metadata {
    my ($url) = @_;
    $url =~ s:^/res/::;
    # Pull the processed evaluation data for this resource.
    my %metadata =
        &LONCAPA::lonmetadata::process_dynamic_metadata($url,\%DynamicData);
    # Attach the access count gathered earlier.
    $metadata{'count'} = $Counts{$url};
    # Log the dynamic metadata
    if ($debug) {
        foreach my $field (keys(%metadata)) {
            &log(8,"    ".$field." => ".$metadata{$field});
        }
    }
    return %metadata;
}
|
|
|
} # End of %DynamicData and %Counts scope |
|
|
|
######################################################## |
|
######################################################## |
|
### ### |
|
### Counts ### |
|
### ### |
|
######################################################## |
|
######################################################## |
|
{ |
|
|
|
my %countext; |
|
|
|
##
## &count_type($file)
## Tallies the lower-cased file extension of $file in the file-scoped
## %countext hash.
sub count_type {
    my $file=shift;
    # Only count when the name actually has an extension.  The original
    # read $1 unconditionally after an unchecked match, which silently
    # reuses a stale capture from an earlier regex when the filename
    # has no ".ext" suffix.
    if ($file=~/\.(\w+)$/) {
        my $ext=lc($1);
        $countext{$ext}++;
    }
}
|
|
|
##
## &write_type_count()
## Dumps the file-scoped %countext hash as URL-style "ext=count&"
## pairs, followed by a timestamp, to the lon-status rescount file.
sub write_type_count {
    # Three-arg open with a lexical handle (the original used an
    # unchecked 2-arg open on a bareword handle); skip the dump rather
    # than print to an unopened handle if the file cannot be written.
    open(my $rescount,'>','/home/httpd/html/lon-status/rescount.txt')
        or return;
    while (my ($extension,$count) = each(%countext)) {
        print $rescount $extension.'='.$count.'&';
    }
    print $rescount 'time='.time."\n";
    close($rescount);
}
|
|
|
} # end of scope for %countext |
|
|
|
{ |
|
|
|
my %copyrights; |
|
|
|
##
## &count_copyright($copyright)
## Tallies a resource's copyright value in the file-scoped %copyrights.
sub count_copyright {
    # Unpack the argument explicitly.  The original wrote @_[0], a
    # one-element array slice: it happens to work but triggers a
    # "scalar value better written as $_[0]" warning.
    my ($copyright) = @_;
    $copyrights{$copyright}++;
}
|
|
|
##
## &write_copyright_count()
## Dumps the file-scoped %copyrights hash as URL-style
## "copyright=count&" pairs, followed by a timestamp, to the
## lon-status copyrightcount file.
sub write_copyright_count {
    # Three-arg open with a lexical handle (the original used an
    # unchecked 2-arg open on a bareword handle); bail out quietly if
    # the status file cannot be written.
    open(my $copycount,'>','/home/httpd/html/lon-status/copyrightcount.txt')
        or return;
    while (my ($copyright,$count) = each(%copyrights)) {
        print $copycount $copyright.'='.$count.'&';
    }
    print $copycount 'time='.time."\n";
    close($copycount);
}
|
|
|
} # end of scope for %copyrights |
|
|
|
######################################################## |
|
######################################################## |
|
### ### |
|
### Miscellanous Utility Routines ### |
|
### ### |
|
######################################################## |
|
######################################################## |
|
##
## &ishome($username)
## Returns 1 if $username is a LON-CAPA author, 0 otherwise
## (copied from lond, modification of the return value)
sub ishome {
    my $author=shift;
    # Reduce a construction-space path to "domain/username".
    $author=~s{/home/httpd/html/res/([^/]*)/([^/]*).*}{$1/$2};
    my ($udom,$uname)=split(/\//,$author);
    # The user's directory exists only on the author's home server.
    return ((-e propath($udom,$uname)) ? 1 : 0);
}
|
|
|
##
## &declutter($filename)
## Given a filename, returns a url for the filename.
sub declutter {
    my ($fn) = @_;
    # Strip, in order: the document root, a leading "/", and a leading
    # "res/" component, leaving only the resource-space url.
    $fn =~ s/^$Apache::lonnet::perlvar{'lonDocRoot'}//;
    $fn =~ s{^/}{};
    $fn =~ s{^res/}{};
    return $fn;
}
|
|
=cut |
|
|
|
# &get_file_contents($file) - returns the file's contents, or -1 on error.
#
# Returns -1 when the file is missing OR cannot be opened; the original
# returned '' for an existing-but-unreadable file (the undefined handle
# made the read loop a no-op), which callers could not distinguish from
# a genuinely empty file.
sub get_file_contents ($)
{
    my $file = shift(@_);

    # If file does not exist, then return a -1 value.
    unless (-e $file)
    {
        return(-1);
    }

    # Open for reading; treat an unopenable file as an error too.
    my $file_handle = IO::File->new($file);
    return(-1) unless (defined($file_handle));

    # Slurp the whole file in one read rather than a line loop.
    my $file_contents = do { local $/; <$file_handle> };
    $file_contents = '' if (! defined($file_contents)); # empty file

    $file_handle->close();

    # Return file contents.
    return($file_contents);
}
|
|
|
=pod |
|
|
|
B<declutter> - Declutters URLs (remove extraneous prefixed filesystem path). |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$filesystem_path> - a complete filesystem path. |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<string> - remnants of the filesystem path (beginning portion removed). |
|
|
|
=back |
|
|
|
=cut |
|
|
|
# Strips the local filesystem prefix from a resource path, leaving the
# decluttered remnant (see the POD above for the full contract).
sub declutter
{
    my ($filesystem_path) = @_;

    # Drop the document root prefix, then a leading slash, then a
    # leading "res/" component, in that order.
    $filesystem_path =~ s{^$perlvar{'lonDocRoot'}}{};
    $filesystem_path =~ s{^/}{};
    $filesystem_path =~ s{^res/}{};

    return($filesystem_path);
}
|
|
|
=pod |
|
|
|
B<query_home_server_status> - Is this the home server of an author's directory? |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$author_filesystem_path> - directory path for a user. |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<boolean> - 1 if true; 0 if false. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
# Answers whether this machine is the home server for the author whose
# construction-space path is given: 1 if so, 0 otherwise (see POD above).
sub query_home_server_status ($)
{
    my ($author_filesystem_path) = @_;

    # Reduce the resource-space path to "domain/username".
    $author_filesystem_path =~
        s!/home/httpd/html/res/([^/]*)/([^/]*).*!$1/$2!;

    # Build the path to the author's ordinary user directory; that
    # directory exists only on the author's home server.
    my ($user_domain,$username) = split(m!/!,$author_filesystem_path);
    my $user_directory_path =
        construct_path_to_user_directory($user_domain,$username);

    # 1 when the user directory is present on this host, 0 otherwise.
    return((-e $user_directory_path) ? 1 : 0);
}
|
|
|
=pod |
|
|
|
B<construct_path_to_user_directory> ($$) - makes a filesystem path to user dir. |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$user_domain> - the loncapa domain of the user. |
|
|
|
=item I<$username> - the unique username (user id) of the user. |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<string> - representing the path on the filesystem. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
# Builds the filesystem path to a user's directory under lonUsersDir
# (see the POD above for parameters and return value).
sub construct_path_to_user_directory ($$)
{
    my ($user_domain,$username) = @_;

    # Untaint both components.
    $user_domain =~ s/\W//g;
    $username =~ s/\W//g;

    # User directories are fanned out under three single-character
    # subdirectory levels taken from the start of the username; the
    # '__' padding guarantees three characters for short usernames.
    my $fanout = $username.'__';
    $fanout =~ s!(.)(.)(.).*!$1/$2/$3/!;

    # Assemble and return the complete filesystem path.
    return(join('/',($perlvar{'lonUsersDir'},
                     $user_domain,
                     $fanout,
                     $username)));
}
|
|
|
=pod |
|
|
|
B<sql_formatted_time> (@) - turns seconds since epoch into datetime sql format. |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$epochtime> - time in seconds since epoch (may need to be sanitized). |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<string> - datetime sql formatted string. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
# &sql_formatted_time($epochtime) - seconds-since-epoch (sanitized
# first via &sanitize_time) formatted as a MySQL DATETIME string.
sub sql_formatted_time ($)
{
    # Sanitize the time argument and convert to localtime array.
    my ($sec,$min,$hour,$mday,$mon,$year) =
        localtime(&sanitize_time(shift(@_)));

    # Zero-pad every field: MySQL's canonical DATETIME format is
    # "YYYY-MM-DD HH:MM:SS".  The original join() emitted unpadded
    # values such as "2003-2-3 5:9:7".  localtime months are 0..11 and
    # years are counted from 1900, hence the offsets.
    return(sprintf('%04d-%02d-%02d %02d:%02d:%02d',
                   $year+1900,$mon+1,$mday,$hour,$min,$sec));
}
|
|
|
|
|
# ==================================== The following two subroutines are needed |
|
# for accommodating incorrect time formats inside the metadata. |
|
|
|
=pod |
|
|
|
B<make_seconds_since_epoch> (@) - turns time metadata into seconds since epoch. |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<%time_metadata> - a key-value listing characterizing month, year, etc. |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<integer> - seconds since epoch. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
# &make_seconds_since_epoch(%time_metadata) - converts a key-value time
# description (seconds/minutes/hours/day/month/year/dlsav) into seconds
# since the epoch (January 1, 1970, 00:00:00 UTC).
sub make_seconds_since_epoch (@)
{
    # Keytable of time metadata.
    my %time_metadata = @_;

    # POSIX::mktime expects months 0..11 and years counted from 1900;
    # the metadata stores human-readable month/year values, hence the
    # offsets.  wday/yday (the two zeros) are ignored by mktime.
    return(POSIX::mktime($time_metadata{'seconds'},
                         $time_metadata{'minutes'},
                         $time_metadata{'hours'},
                         $time_metadata{'day'},
                         $time_metadata{'month'}-1,
                         $time_metadata{'year'}-1900,
                         0,
                         0,
                         $time_metadata{'dlsav'}));
}
|
|
|
=pod |
|
|
|
B<sanitize_time> - if time looks sql-formatted, make it seconds since epoch. |
|
|
|
Somebody described this subroutine as |
|
"retro-fixing of un-backward-compatible time format". |
|
|
|
What this means, is that a part of this code expects to get UTC seconds |
|
since the epoch (beginning of 1970). Yet, some of the .meta files have |
|
sql-formatted time strings (2001-04-01, etc.) instead of seconds-since-epoch |
|
integers (e.g. 1044147435). These time strings do not encode the timezone |
|
and, in this sense, can be considered "un-backwards-compatible". |
|
|
|
=over 4 |
|
|
|
Parameters: |
|
|
|
=item I<$potentially_badformat_string> - string to "retro-fix". |
|
|
|
=back |
|
|
|
=over 4 |
|
|
|
Returns: |
|
|
|
=item C<integer> - seconds since epoch. |
|
|
|
=back |
|
|
|
=cut |
|
|
|
# &sanitize_time($timestamp) - retro-fixes sql-formatted time strings.
# Some .meta files store "YYYY-MM-DD HH:MM:SS" strings where seconds
# since the epoch are expected; convert those, pass everything else
# through unchanged.
sub sanitize_time ($)
{
    my $timestamp = shift(@_);

    # Only the unexpected sql-ish format needs converting.
    if ($timestamp =~ /^(\d+)\-(\d+)\-(\d+)\s+(\d+)\:(\d+)\:(\d+)$/)
    {
        $timestamp = &make_seconds_since_epoch('year'    => $1,
                                               'month'   => $2,
                                               'day'     => $3,
                                               'hours'   => $4,
                                               'minutes' => $5,
                                               'seconds' => $6);
    }

    # Anything else is assumed to already be seconds since the epoch.
    return($timestamp);
}
|
|
|
=pod |
|
|
|
=head1 AUTHOR |
|
|
|
Written to help the loncapa project. |
|
|
|
Scott Harrison, sharrison@users.sourceforge.net |
|
|
|
This is distributed under the same terms as loncapa (i.e. "freeware"). |
|
|
|
=cut |
|