Dear ALL,
When testing a simple program for downloading image files from Wikimedia (reproduced below), I keep getting an error message
Unexpected response status code: 403, which shows that a client is forbidden from accessing a valid URL because of client side issues.
I suspect (but of course I am not sure) that the problem is that this simple test program is blocked by Wikimedia because the program does not provide a "User-Agent header" to fake a browser visit. If this is really the case, how could I provide this information using the HTTPClient library?
Here is the code:
program wikimedia;
{$MODE OBJFPC}{$H+}
uses
Classes, SysUtils, fphttpclient, openssl, opensslsockets;
procedure DumpExceptionCallStack(E: Exception);
var
I: Integer;
Frames: PPointer;
Report: string;
begin
Report := 'Program exception! ' + LineEnding + 'Stacktrace:' + LineEnding + LineEnding;
if E <> nil then
begin
Report := Report + 'Exception class: ' + E.ClassName + LineEnding + 'Message: ' + E.Message + LineEnding;
end;
Report := Report + BackTraceStrFunc(ExceptAddr);
Frames := ExceptFrames;
for I := 0 to ExceptFrameCount - 1
do Report := Report + LineEnding + BackTraceStrFunc(Frames[I]);
WriteLn(Report);
Halt;
end;
function GetUrlAs(Url: String; AsName: String): Boolean;
begin
Result := False;
with TFPHttpClient.Create(nil) do
try
AllowRedirect := True;
if (ExtractFilePath(AsName) <> '') then
if not DirectoryExists(ExtractFilePath(AsName)) then
if not ForceDirectories(ExtractFilePath(AsName)) then Exit;
try
WriteLn(trim(FormatDateTime('h:nn:ss AM/PM MM/DD/YYYY', now)) + ' GET: ' + Url);
Get(Url, AsName);
Result := True;
finally
Free;
end;
except
on E: Exception do DumpExceptionCallStack(E);
end;
end;
procedure ReportStatus(Success: Boolean); inline;
begin
if Success then WriteLn('Succes') else WriteLn('Failure');
end;
const
URL1 = 'http://commons.wikimedia.org/wiki/Special:Filepath/Illustration_Vicia_faba1.jpg';
URL2 = 'http://commons.wikimedia.org/wiki/Special:Filepath/Broadbean_Yield.png';
URL3 = 'http://commons.wikimedia.org/wiki/Special:Filepath/Fava_beans_1.jpg';
var
TargetDir : String;
SomeDirectory : String;
SomeFolderName : String;
RetVal : boolean;
begin
InitSSLInterface;
SomeDirectory := IncludeTrailingPathDelimiter(GetCurrentDir);
SomeFolderName := 'pictures';
TargetDir := IncludeTrailingPathDelimiter(SomeDirectory + SomeFolderName);
WriteLn('Attempting to download URL''s into directory ', TargetDir);
//RetVal := GetUrlAs(URL1, TargetDir + 'pic1.jpg');
RetVal := GetUrlAs(URL1, TargetDir + ExtractFileName(URL1));
ReportStatus(RetVal);
Sleep(1000);
RetVal := GetUrlAs(URL2, TargetDir + ExtractFileName(URL2));
ReportStatus(RetVal);
Sleep(1000);
RetVal := GetUrlAs(URL3, TargetDir + ExtractFileName(URL3));
ReportStatus(RetVal);
WriteLn('Done!');
end.
Could some of you wizards give me some hints on how to get this working?
Thanks in advance!
With warmest regards,