diff --git a/lib/DBIx/Class/Admin.pm b/lib/DBIx/Class/Admin.pm
index c2d9bb6..b0d76b8 100644
--- a/lib/DBIx/Class/Admin.pm
+++ b/lib/DBIx/Class/Admin.pm
@@ -1,66 +1,37 @@
-#
-#===============================================================================
-#
-#         FILE:  Admin.pm
-#
-#  DESCRIPTION:  Administrative functions for DBIx::Class Schemata
-#
-#        FILES:  ---
-#         BUGS:  ---
-#        NOTES:  ---
-#       AUTHOR:  Gordon Irving (),
-#      VERSION:  1.0
-#      CREATED:  28/11/09 12:27:15 GMT
-#     REVISION:  ---
-#===============================================================================
-
 package DBIx::Class::Admin;
 
+# check deps
+BEGIN {
+  use DBIx::Class;
+  die('The following modules are required for DBIx::Class::Admin ' . DBIx::Class::Optional::Dependencies->req_missing_for ('admin') )
+    unless DBIx::Class::Optional::Dependencies->req_ok_for ('admin');
+}
+
 use Moose;
-use MooseX::Types
-  -declare => [qw( DBICConnectInfo )];
-use MooseX::Types::Moose qw/Int HashRef ArrayRef Str Any/;
+use MooseX::Types::Moose qw/Int Str Any Bool/;
+use DBIx::Class::Admin::Types qw/DBICConnectInfo DBICHashRef/;
 use MooseX::Types::JSON qw(JSON);
 use MooseX::Types::Path::Class qw(Dir File);
 use Try::Tiny;
-
-use parent 'Class::C3::Componentised';
-
-use JSON::Any;
-
+use JSON::Any qw(DWIW XS JSON);
 use namespace::autoclean;
 
-my @_deps = qw(Moose MooseX::Types MooseX::Types::JSON MooseX::Types::Path::Class Try::Tiny parent JSON::Any Class::C3::Componentised namespace::autoclean);
-
-coerce ArrayRef,
-  from JSON,
-  via { _json_to_data ($_) };
-
-coerce HashRef,
-  from JSON,
-  via { _json_to_data($_) };
-
-subtype DBICConnectInfo,
-  as ArrayRef;
-
-coerce DBICConnectInfo,
-  from JSON,
-  via { return _json_to_data($_) } ;
-
-coerce DBICConnectInfo,
-  from Str,
-  via { return _json_to_data($_) };
-
-coerce DBICConnectInfo,
-  from HashRef,
-  via { [ $_->{dsn}, $_->{user}, $_->{password} ] };
-
 =head1 NAME
 
 DBIx::Class::Admin - Administration object for schemas
 
 =head1 SYNOPSIS
 
+  $ dbicadmin --help
+
+  $ dbicadmin --schema=MyApp::Schema \
+    --connect='["dbi:SQLite:my.db", "", ""]' \
+    --deploy
+
+  $ dbicadmin --schema=MyApp::Schema --class=Employee \
+    --connect='["dbi:SQLite:my.db", "", ""]' \
+    --op=update --set='{ "name": "New_Employee" }'
+
   use DBIx::Class::Admin;
 
   # ddl manipulation
@@ -82,101 +53,108 @@ DBIx::Class::Admin - Administration object for schemas
   # install a version for an unversioned schema
   $admin->install("3.0");
 
-=head1 Attributes
+=head1 REQUIREMENTS
 
-=head2 lib
-
-add a library search path
-=cut
-has lib => (
-  is      => 'ro',
-  isa     => Dir,
-  coerce  => 1,
-  trigger => \&_set_inc,
-);
+The Admin interface has additional requirements not currently part of
+L<DBIx::Class>. See L<DBIx::Class::Optional::Dependencies> for more details.
-sub _set_inc {
-  my ($self, $lib) = @_;
-  push @INC, $lib->stringify;
-}
+=head1 ATTRIBUTES
 
 =head2 schema_class
 
 the class of the schema to load
 
+
 =cut
+
 has 'schema_class' => (
-  is      => 'ro',
-  isa     => 'Str',
-  coerce  => 1,
+  is  => 'ro',
+  isa => Str,
 );
 
+
 =head2 schema
 
 A pre-connected schema object can be provided for manipulation
 
+
 =cut
+
 has 'schema' => (
-  is  => 'ro',
-  isa => 'DBIx::Class::Schema',
+  is          => 'ro',
+  isa         => 'DBIx::Class::Schema',
   lazy_build  => 1,
 );
 
 sub _build_schema {
   my ($self) = @_;
-  $self->ensure_class_loaded($self->schema_class);
-  $self->connect_info->[3]->{ignore_version} =1;
-  return $self->schema_class->connect(@{$self->connect_info()} ); # , $self->connect_info->[3], { ignore_version => 1} );
+  require Class::MOP;
+  Class::MOP::load_class($self->schema_class);
+  $self->connect_info->[3]{ignore_version} = 1;
+  return $self->schema_class->connect(@{$self->connect_info});
 }
 
 =head2 resultset
 
 a resultset from the schema to operate on
 
+
 =cut
+
 has 'resultset' => (
-  is  => 'rw',
-  isa => Str,
+  is  => 'rw',
+  isa => Str,
 );
 
+
 =head2 where
 
 a hash ref or json string to be used for identifying data to manipulate
 
+
 =cut
 
 has 'where' => (
   is      => 'rw',
-  isa     => HashRef,
-  coerce  => 1,
+  isa     => DBICHashRef,
+  coerce  => 1,
 );
 
+
 =head2 set
+
 a hash ref or json string to be used for inserting or updating data
 
+
 =cut
 
 has 'set' => (
   is      => 'rw',
-  isa     => HashRef,
-  coerce  => 1,
+  isa     => DBICHashRef,
+  coerce  => 1,
 );
 
+
 =head2 attrs
-a hash ref or json string to be used for passing additonal info to the ->search call
+
+a hash ref or json string to be used for passing additional info to the ->search call
+
 =cut
+
 has 'attrs' => (
-  is      => 'rw',
-  isa     => HashRef,
-  coerce  => 1,
+  is      => 'rw',
+  isa     => DBICHashRef,
+  coerce  => 1,
 );
+
+
 =head2 connect_info
 
 connect_info the arguments to provide to the connect call of the schema_class
 
-=cut
+=cut
 
 has 'connect_info' => (
-  is          => 'ro',
-  isa         => DBICConnectInfo,
+  is          => 'ro',
+  isa         => DBICConnectInfo,
   lazy_build  => 1,
-  coerce      => 1,
+  coerce      => 1,
 );
 
 sub _build_connect_info {
@@ -184,42 +162,53 @@ sub _build_connect_info {
   return $self->_find_stanza($self->config, $self->config_stanza);
 }
 
+
 =head2 config_file
 
 config_file provide a config_file to read connect_info from, if this is provided
 config_stanza should also be provided to locate where the connect_info is in the config
 
-The config file should be in a format readable by Config::General
+The config file should be in a format readable by Config::Any.
+
 =cut
+
 has config_file => (
   is      => 'ro',
-  isa     => File,
-  coerce  => 1,
+  isa     => File,
+  coerce  => 1,
 );
 
+
 =head2 config_stanza
 
-config_stanza for use with config_file should be a '::' deliminated 'path' to the connection information
+config_stanza for use with config_file should be a '::' delimited 'path' to the connection information
 designed for use with catalyst config files
 
+
 =cut
+
 has 'config_stanza' => (
-  is  => 'ro',
-  isa => 'Str',
+  is  => 'ro',
+  isa => Str,
 );
 
+
 =head2 config
 
-Instead of loading from a file the configuration can be provided directly as a hash ref. Please note
+Instead of loading from a file the configuration can be provided directly as a hash ref. Please note
 config_stanza will still be required.
+
 =cut
+
 has config => (
-  is  => 'ro',
-  isa => HashRef,
+  is  => 'ro',
+  isa => DBICHashRef,
   lazy_build => 1,
 );
 
 sub _build_config {
   my ($self) = @_;
-  try { require 'Config::Any'; } catch { die "Config::Any is required to parse the config file"; };
+
+  try { require Config::Any }
+  catch { die ("Config::Any is required to parse the config file.\n") };
 
   my $cfg = Config::Any->load_files ( {files => [$self->config_file], use_ext =>1, flatten_to_hash=>1});
 
@@ -228,57 +217,102 @@ sub _build_config {
   return $cfg;
 }
 
+
 =head2 sql_dir
 
 The location where sql ddl files should be created or found for an upgrade.
 
+
 =cut
+
 has 'sql_dir' => (
   is      => 'ro',
-  isa     => Dir,
-  coerce  => 1,
+  isa     => Dir,
+  coerce  => 1,
+);
+
+
+=head2 sql_type
+
+The type of sql dialect to use for creating sql files from schema
+
+=cut
+
+has 'sql_type' => (
+  is  => 'ro',
+  isa => Str,
 );
 
 =head2 version
 
 Used for install, the version which will be 'installed' in the schema
 
+
 =cut
+
 has version => (
-  is  => 'rw',
-  isa => 'Str',
+  is  => 'rw',
+  isa => Str,
 );
 
+
 =head2 preversion
 
-Previouse version of the schema to create an upgrade diff for, the full sql for that version of the sql must be in the sql_dir
+Previous version of the schema to create an upgrade diff for, the full sql for that version of the sql must be in the sql_dir
 
+
 =cut
+
 has preversion => (
-  is  => 'rw',
-  isa => 'Str',
+  is  => 'rw',
+  isa => Str,
 );
 
+
 =head2 force
 
 Try and force certain operations.
 
+
 =cut
+
 has force => (
-  is  => 'rw',
-  isa => 'Bool',
+  is  => 'rw',
+  isa => Bool,
 );
 
+
 =head2 quiet
 
 Be less verbose about actions
 
+
 =cut
+
 has quiet => (
-  is  => 'rw',
-  isa => 'Bool',
+  is  => 'rw',
+  isa => Bool,
 );
 
 has '_confirm' => (
-  is  => 'bare',
-  isa => 'Bool',
+  is  => 'bare',
+  isa => Bool,
+);
+
+
+=head2 trace
+
+Toggle DBIx::Class debug output
+
+=cut
+
+has trace => (
+  is  => 'rw',
+  isa => Bool,
+  trigger => \&_trigger_trace,
 );
 
+sub _trigger_trace {
+  my ($self, $new, $old) = @_;
+  $self->schema->storage->debug($new);
+}
+
+
 =head1 METHODS
 
 =head2 create
 
 =over 4
 
 =item Arguments: $sqlt_type, \%sqlt_args, $preversion
 
@@ -289,18 +323,21 @@ has '_confirm' => (
 
 =back
 
-L<create> will generate sql for the supplied schema_class in sql_dir. The flavour of sql to
-generate can be controlled by suppling a sqlt_type which should be a L<SQL::Translator> name.
+C<create> will generate sql for the supplied schema_class in sql_dir. The
+flavour of sql to generate can be controlled by supplying a sqlt_type which
+should be a L<SQL::Translator> name.
 
 Arguments for L<SQL::Translator> can be supplied in the sqlt_args hashref.
 
 Optional preversion can be supplied to generate a diff to be used by upgrade.
 
+
 =cut
 
 sub create {
   my ($self, $sqlt_type, $sqlt_args, $preversion) = @_;
 
   $preversion ||= $self->preversion();
+  $sqlt_type ||= $self->sql_type();
 
   my $schema = $self->schema();
   # create the dir if does not exist
@@ -309,6 +346,7 @@ sub create {
   $schema->create_ddl_dir( $sqlt_type, (defined $schema->schema_version ? $schema->schema_version : ""), $self->sql_dir->stringify, $preversion, $sqlt_args );
 }
 
+
 =head2 upgrade
 
 =over 4
@@ -319,20 +357,24 @@ sub create {
 
 upgrade will attempt to upgrade the connected database to the same version as the schema_class.
 B<MAKE A BACKUP OF YOUR DATABASE FIRST>
+
 =cut
 
 sub upgrade {
   my ($self) = @_;
   my $schema = $self->schema();
+
   if (!$schema->get_db_version()) {
     # schema is unversioned
-    die "could not determin current schema version, please either install or deploy";
+    $schema->throw_exception ("Could not determine current schema version, please either install() or deploy().\n");
   }
   else {
+    $schema->upgrade_directory ($self->sql_dir) if $self->sql_dir; # this will override whatever default the schema has
     my $ret = $schema->upgrade();
     return $ret;
   }
 }
 
+
 =head2 install
 
 =over 4
@@ -341,11 +383,13 @@ sub upgrade {
 
 =item Arguments: $version
 
 =back
 
-install is here to help when you want to move to L<DBIx::Class::Schema::Versioned> and have an existing
-database. install will take a version and add the version tracking tables and 'install' the version. No
-further ddl modification takes place. Setting the force attribute to a true value will allow overriding of
+install is here to help when you want to move to L<DBIx::Class::Schema::Versioned> and have an existing
+database. install will take a version and add the version tracking tables and 'install' the version. No
+further ddl modification takes place. Setting the force attribute to a true value will allow overriding of
 already versioned databases.
+
 =cut
+
 sub install {
   my ($self, $version) = @_;
 
@@ -353,23 +397,23 @@ sub install {
   $version ||= $self->version();
   if (!$schema->get_db_version() ) {
     # schema is unversioned
-    print "Going to install schema version\n";
+    print "Going to install schema version\n" if (!$self->quiet);
     my $ret = $schema->install($version);
-    print "retun is $ret\n";
+    print "return is $ret\n" if (!$self->quiet);
   }
   elsif ($schema->get_db_version() and $self->force ) {
-    warn "forcing install may not be a good idea";
+    warn "Forcing install may not be a good idea\n";
     if($self->_confirm() ) {
-      # FIXME private api
       $self->schema->_set_db_version({ version => $version});
     }
   }
   else {
-    die "schema already has a version not installing, try upgrade instead";
+    $schema->throw_exception ("Schema already has a version. Try upgrade instead.\n");
  }
 }
 
+
 =head2 deploy
 
 =over 4
@@ -378,26 +422,17 @@ sub install {
 
 =item Arguments: $args
 
 =back
 
-deploy will create the schema at the connected database. C<$args> are passed straight to
-L<DBIx::Class::Schema/deploy>.
+deploy will create the schema at the connected database. C<$args> are passed straight to
+L<DBIx::Class::Schema/deploy>.
+
 =cut
+
 sub deploy {
   my ($self, $args) = @_;
 
   my $schema = $self->schema();
-  if (!$schema->get_db_version() ) {
-    # schema is unversioned
-    $schema->deploy( $args, $self->sql_dir)
-      or die "could not deploy schema";
-  } else {
-    die "there already is a database with a version here, try upgrade instead";
-  }
+  $schema->deploy( $args, $self->sql_dir );
 }
-
-# FIXME ensure option spec compatability
-#die('Do not use the where option with the insert op') if ($where);
-#die('Do not use the attrs option with the insert op') if ($attrs);
-
 =head2 insert
 
 =over 4
@@ -410,6 +445,7 @@ insert takes the name of a resultset from the schema_class and a hashref of
 data into that resultset
 
 =cut
+
 sub insert {
   my ($self, $rs, $set) = @_;
 
@@ -423,15 +459,17 @@ sub insert {
 
 =head2 update
 
-=over 4
+=over 4
 
 =item Arguments: $rs, $set, $where
 
 =back
 
-update takes the name of a resultset from the schema_class, a hashref of data to update and
-a where hash used to form the search for the rows to update.
+update takes the name of a resultset from the schema_class, a hashref of data to update and
+a where hash used to form the search for the rows to update.
+
 =cut
+
 sub update {
   my ($self, $rs, $set, $where) = @_;
 
@@ -449,8 +487,7 @@ sub update {
   }
 }
 
-# FIXME
-#die('Do not use the set option with the delete op') if ($set);
+
 =head2 delete
 
 =over 4
@@ -459,9 +496,11 @@ sub update {
 
 =back
 
-delete takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
+delete takes the name of a resultset from the schema_class, a where hashref and an attrs hashref to pass to ->search.
 The found data is deleted and cannot be recovered.
+
 =cut
+
 sub delete {
   my ($self, $rs, $where, $attrs) = @_;
 
@@ -479,6 +518,7 @@ sub delete {
   }
 }
 
+
 =head2 select
 
 =over 4
@@ -487,10 +527,11 @@ sub delete {
 
 =back
 
-select takes the name of a resultset from the schema_class, a where hashref and a attrs to pass to ->search.
+select takes the name of a resultset from the schema_class, a where hashref and an attrs hashref to pass to ->search.
 The found data is returned in an array ref where the first row will be the columns list.
 
 =cut
+
 sub select {
   my ($self, $rs, $where, $attrs) = @_;
 
@@ -502,7 +543,7 @@ sub select {
   my @data;
   my @columns = $resultset->result_source->columns();
-  push @data, [@columns];# 
+  push @data, [@columns];#
 
   while (my $row = $resultset->next()) {
     my @fields;
@@ -517,12 +558,14 @@ sub select {
 sub _confirm {
   my ($self) = @_;
-  print "Are you sure you want to do this? (type YES to confirm) \n";
+
   # mainly here for testing
   return 1 if ($self->meta->get_attribute('_confirm')->get_value($self));
+
+  print "Are you sure you want to do this? (type YES to confirm) \n";
   my $response = <STDIN>;
-  return 1 if ($response=~/^YES/);
-  return;
+
+  return ($response=~/^YES/);
 }
 
 sub _find_stanza {
@@ -533,46 +576,21 @@ sub _find_stanza {
       $cfg = $cfg->{$path};
     }
     else {
-      die "could not find $stanza in config, $path did not seem to exist";
+      die ("Could not find $stanza in config, $path does not seem to exist.\n");
    }
  }
+  $cfg = $cfg->{connect_info} if exists $cfg->{connect_info};
   return $cfg;
 }
 
-sub _json_to_data {
-  my ($json_str) = @_;
-  my $json = JSON::Any->new(allow_barekey => 1, allow_singlequote => 1, relaxed=>1);
-  my $ret = $json->jsonToObj($json_str);
-  return $ret;
-}
-
-
-{ # deps check
-
-my @_missing_deps;
-foreach my $dep (@_deps) {
-  eval "require $dep";
-  if ($@) {
-    push @_missing_deps, $dep;
-  }
-}
-
-if (@_missing_deps > 0) {
-  die "The following dependecies are missing " . join ",", @_missing_deps;
-}
-
-
-}
 
 =head1 AUTHOR
 
-Gordon Irving
-
-with code taken from dbicadmin by
-Aran Deltac
-
+See L<DBIx::Class/CONTRIBUTORS>.
 
 =head1 LICENSE
 
-You may distribute this code under the same terms as Perl itself.
+You may distribute this code under the same terms as Perl itself
+
 =cut
+
 1;
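For readers who want to drive the new interface from Perl rather than through dbicadmin, here is a minimal usage sketch assembled only from the attributes and methods visible in the diff above (schema_class, connect_info, sql_dir, create, insert, select); the schema name, DSN and sql directory are hypothetical placeholders, not part of the commit:

  use DBIx::Class::Admin;

  # build an admin object around an (assumed) schema class and database
  my $admin = DBIx::Class::Admin->new(
    schema_class => 'MyApp::Schema',                 # hypothetical schema
    connect_info => [ 'dbi:SQLite:my.db', '', '' ],  # coerced via DBICConnectInfo
    sql_dir      => 'sql',                           # where DDL files are written/read
  );

  # write DDL for the schema; the dialect falls back to the sql_type attribute
  $admin->create('SQLite');

  # data manipulation on a named resultset
  $admin->insert('Employee', { name => 'New_Employee' });

  # select returns an array ref whose first row is the column list
  my $rows = $admin->select('Employee', { name => 'New_Employee' }, { rows => 10 });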